From fc0aa9c59c12818d17e7b54ea5910ce2ff9adad3 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 3 May 2023 00:00:59 +0000 Subject: [PATCH 01/38] First pass at replacing fill_jinja_template with set_template --- Externals.cfg | 9 ++++ docs/UsersGuide/source/RocotoInfo.rst | 2 +- docs/UsersGuide/source/RunSRW.rst | 2 +- scripts/exregional_run_met_pb2nc_obs.sh | 22 +++++---- scripts/exregional_run_met_pcpcombine.sh | 23 +++++---- ush/create_aqm_rc_file.py | 28 +++++------ ush/create_diag_table_file.py | 45 +++++++++--------- ush/create_model_configure_file.py | 59 +++++++++++++----------- ush/create_nems_configure_file.py | 42 +++++++++-------- ush/generate_FV3LAM_wflow.py | 11 +++-- 10 files changed, 140 insertions(+), 103 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 541bff335e..a431652733 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -62,5 +62,14 @@ hash = 0a86f73 local_path = sorc/AQM-utils required = True +[uwtools] +protocol = git +repo_url = https://github.com/ufs-community/workflow-tools +# Specify either a branch name or a hash but not both. +#branch = develop +hash = 04da24c +local_path = ush/python_utils/uwtools +required = True + [externals_description] schema_version = 1.0.0 diff --git a/docs/UsersGuide/source/RocotoInfo.rst b/docs/UsersGuide/source/RocotoInfo.rst index 7557addc7d..ad0ec024c9 100644 --- a/docs/UsersGuide/source/RocotoInfo.rst +++ b/docs/UsersGuide/source/RocotoInfo.rst @@ -11,7 +11,7 @@ system as the task dependencies allow and runs one instance of the workflow for The SRW App workflow is defined in a Jinja-enabled Rocoto XML template called ``FV3LAM_wflow.xml``, which resides in the ``parm`` directory. When the ``generate_FV3LAM_wflow.py`` -script is run, the ``fill_jinja_template.py`` script is called, and the parameters in the template file +script is run, the ``set_template`` uwtool is called, and the parameters in the template file are filled in. The completed file contains the workflow task names, parameters needed by the job scheduler, and task interdependencies. The generated XML file is then copied to the experiment directory: ``$EXPTDIR/FV3LAM_wflow.xml``. diff --git a/docs/UsersGuide/source/RunSRW.rst b/docs/UsersGuide/source/RunSRW.rst index a91b444892..f47eea4cf5 100644 --- a/docs/UsersGuide/source/RunSRW.rst +++ b/docs/UsersGuide/source/RunSRW.rst @@ -851,7 +851,7 @@ The generated workflow will appear in ``$EXPTDIR``, where ``EXPTDIR=${EXPT_BASED .. _WorkflowGeneration: .. figure:: _static/SRW_regional_workflow_gen.png - :alt: Flowchart of the workflow generation process. Scripts are called in the following order: source_util_funcs.sh (which calls bash_utils), then set_FV3nml_sfc_climo_filenames.py, set_FV3nml_ens_stoch_seeds.py, create_diag_table_file.py, and setup.py. setup.py calls several scripts: set_cycle_dates.py, set_grid_params_GFDLgrid.py, set_grid_params_ESGgrid.py, link_fix.py, set_ozone_param.py, set_thompson_mp_fix_files.py, config_defaults.yaml, config.yaml, and valid_param_vals.yaml. Then, it sets a number of variables, including FIXgsm, TOPO_DIR, and SFC_CLIMO_INPUT_DIR variables. Next, set_predef_grid_params.py is called, and the FIXam and FIXLAM directories are set, along with the forecast input files. The setup script also calls set_extrn_mdl_params.py, sets the GRID_GEN_METHOD with HALO, checks various parameters, and generates shell scripts. 
Then, the workflow generation script sets up YAML-compliant strings and generates the actual Rocoto workflow XML file from the template file (fill_jinja_template.py). The workflow generation script checks the crontab file and, if applicable, copies certain fix files to the experiment directory. Then, it copies templates of various input files to the experiment directory and sets parameters for the input.nml file. Finally, it generates the workflow. Additional information on each step appears in comments within each script. + :alt: Flowchart of the workflow generation process. Scripts are called in the following order: source_util_funcs.sh (which calls bash_utils), then set_FV3nml_sfc_climo_filenames.py, set_FV3nml_ens_stoch_seeds.py, create_diag_table_file.py, and setup.py. setup.py calls several scripts: set_cycle_dates.py, set_grid_params_GFDLgrid.py, set_grid_params_ESGgrid.py, link_fix.py, set_ozone_param.py, set_thompson_mp_fix_files.py, config_defaults.yaml, config.yaml, and valid_param_vals.yaml. Then, it sets a number of variables, including FIXgsm, TOPO_DIR, and SFC_CLIMO_INPUT_DIR variables. Next, set_predef_grid_params.py is called, and the FIXam and FIXLAM directories are set, along with the forecast input files. The setup script also calls set_extrn_mdl_params.py, sets the GRID_GEN_METHOD with HALO, checks various parameters, and generates shell scripts. Then, the workflow generation script produces a YAML configuration file and generates the actual Rocoto workflow XML file from the template file (by calling uwtools set_template). The workflow generation script checks the crontab file and, if applicable, copies certain fix files to the experiment directory. Then, it copies templates of various input files to the experiment directory and sets parameters for the input.nml file. Finally, it generates the workflow. Additional information on each step appears in comments within each script. *Experiment Generation Description* diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 19e2072879..ef2306f031 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -256,25 +256,31 @@ settings="\ 'accum_no_pad': '${ACCUM_NO_PAD:-}' 'field_thresholds': '${FIELD_THRESHOLDS:-}' " +# Store the settings in a temporary file +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX)") +cat > $tmpfile << EOF +$settings +EOF # # Call the python script to generate the METplus configuration file from # the jinja template. # -$USHdir/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${metplus_config_tmpl_fp} \ - -o ${metplus_config_fp} || \ +$USHdir/templater.py -q \ + -i ${metplus_config_tmpl_fp} \ + -c ${tmpfile} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to python script fill_jinja_template.py to generate a METplus +Call to uwtools templater.py to generate a METplus configuration file from a jinja template failed. 
Parameters passed to this script are: Full path to template METplus configuration file: metplus_config_tmpl_fp = \"${metplus_config_tmpl_fp}\" Full path to output METplus configuration file: metplus_config_fp = \"${metplus_config_fp}\" - Namelist settings specified on command line: - settings = -$settings" + Full path to configuration file: + ${tmpfile} +" +rm $tmpfile # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index cb2a7b28f3..376bad0925 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -312,25 +312,32 @@ settings="\ 'accum_no_pad': '${ACCUM_NO_PAD:-}' 'field_thresholds': '${FIELD_THRESHOLDS:-}' " +# Store the settings in a temporary file +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX)") +cat > $tmpfile << EOF +$settings +EOF + # # Call the python script to generate the METplus configuration file from # the jinja template. # -$USHdir/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${metplus_config_tmpl_fp} \ - -o ${metplus_config_fp} || \ +python3 templater.py -q \ + -i ${metplus_config_tmpl_fp} \ + -c ${tmpfile} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to python script fill_jinja_template.py to generate a METplus +Call to uwtools templater.py to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: metplus_config_tmpl_fp = \"${metplus_config_tmpl_fp}\" Full path to output METplus configuration file: metplus_config_fp = \"${metplus_config_fp}\" - Namelist settings specified on command line: - settings = -$settings" + Full path to configuration file: + ${tmpfile} +" +rm $tmpfile # #----------------------------------------------------------------------- # diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 926278de7c..4aea1551f5 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -6,6 +6,7 @@ import unittest from datetime import datetime from textwrap import dedent +import tempfile from python_utils import ( import_vars, @@ -20,7 +21,7 @@ flatten_dict ) -from fill_jinja_template import fill_jinja_template +from templater import set_template def create_aqm_rc_file(cdate, run_dir, init_concentrations): """ Creates an aqm.rc file in the specified run directory @@ -114,15 +115,17 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # #----------------------------------------------------------------------- # + with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings") as tmpfile: + tmpfile.write(settings_str) try: - fill_jinja_template( + set_template( [ - "-q", - "-u", - settings_str, - "-t", - AQM_RC_TMPL_FP, - "-o", + "-q", + "-c", + tmpfile, + "-i", + AQM_RC_TMPL_FP, + "-o", aqm_rc_fp, ] ) @@ -130,16 +133,15 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): print_err_msg_exit( dedent( f""" - Call to python script fill_jinja_template.py to create a \"{AQM_RC_FN}\" + Call to uwtools set_template to create a \"{AQM_RC_FN}\" file from a jinja2 template failed. 
Parameters passed to this script are: Full path to template aqm.rc file: AQM_RC_TMPL_FP = \"{AQM_RC_TMPL_FP}\" Full path to output aqm.rc file: aqm_rc_fp = \"{aqm_rc_fp}\" - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str + Full path to configuration file: + {tmpfile} + """ ) return False diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 15a42729f4..d5b3a04cf2 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -5,6 +5,7 @@ import argparse import unittest from textwrap import dedent +import tempfile from python_utils import ( import_vars, @@ -17,7 +18,7 @@ flatten_dict, ) -from fill_jinja_template import fill_jinja_template +from templater import set_template def create_diag_table_file(run_dir): @@ -72,27 +73,29 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - # call fill jinja - try: - fill_jinja_template( - ["-q", "-u", settings_str, "-t", DIAG_TABLE_TMPL_FP, "-o", diag_table_fp] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script fill_jinja_template.py to create a '{DIAG_TABLE_FN}' - file from a jinja2 template failed. Parameters passed to this script are: - Full path to template diag table file: - DIAG_TABLE_TMPL_FP = '{DIAG_TABLE_TMPL_FP}' - Full path to output diag table file: - diag_table_fp = '{diag_table_fp}' - Namelist settings specified on command line:\n - settings =\n\n""" + with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings") as tmpfile: + tmpfile.write(settings_str) + try: + set_template( + ["-q", "-c", tmfile, "-i", DIAG_TABLE_TMPL_FP, "-o", diag_table_fp] ) - + settings_str - ) - return False + except: + print_err_msg_exit( + dedent( + f""" + Call to uwtools set_template to create a '{DIAG_TABLE_FN}' + file from a jinja2 template failed. Parameters passed to this script are: + Full path to template diag table file: + DIAG_TABLE_TMPL_FP = '{DIAG_TABLE_TMPL_FP}' + Full path to output diag table file: + diag_table_fp = '{diag_table_fp}' + Full path to configuration file: + {tmpfile} + """ + ) + + settings_str + ) + return False return True diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 47e1ba3daf..18b9648bae 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -6,6 +6,7 @@ import unittest from datetime import datetime from textwrap import dedent +import tempfile from python_utils import ( import_vars, @@ -20,7 +21,7 @@ flatten_dict, ) -from fill_jinja_template import fill_jinja_template +from templater import set_template def create_model_configure_file( @@ -204,34 +205,36 @@ def create_model_configure_file( # model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN) - try: - fill_jinja_template( - [ - "-q", - "-u", - settings_str, - "-t", - MODEL_CONFIG_TMPL_FP, - "-o", - model_config_fp, - ] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script fill_jinja_template.py to create a '{MODEL_CONFIG_FN}' - file from a jinja2 template failed. 
Parameters passed to this script are: - Full path to template model config file: - MODEL_CONFIG_TMPL_FP = '{MODEL_CONFIG_TMPL_FP}' - Full path to output model config file: - model_config_fp = '{model_config_fp}' - Namelist settings specified on command line:\n - settings =\n\n""" + with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="model_config_settings") as tmpfile: + tmpfile.write(settings_str) + try: + set_template( + [ + "-q", + "-c", + tmpfile, + "-i", + MODEL_CONFIG_TMPL_FP, + "-o", + model_config_fp, + ] ) - + settings_str - ) - return False + except: + print_err_msg_exit( + dedent( + f""" + Call to uwtools set_template to create a '{MODEL_CONFIG_FN}' + file from a jinja2 template failed. Parameters passed to this script are: + Full path to template model config file: + MODEL_CONFIG_TMPL_FP = '{MODEL_CONFIG_TMPL_FP}' + Full path to output model config file: + model_config_fp = '{model_config_fp}' + Full path to configuration file: + {tmpfile} + """ + ) + ) + return False return True diff --git a/ush/create_nems_configure_file.py b/ush/create_nems_configure_file.py index f1837f54a2..1267eb084d 100644 --- a/ush/create_nems_configure_file.py +++ b/ush/create_nems_configure_file.py @@ -5,6 +5,7 @@ import argparse import unittest from datetime import datetime +import tempfile from textwrap import dedent from python_utils import ( @@ -20,7 +21,7 @@ flatten_dict, ) -from fill_jinja_template import fill_jinja_template +from templater import set_template def create_nems_configure_file(run_dir): """ Creates a nems configuration file in the specified @@ -89,24 +90,29 @@ def create_nems_configure_file(run_dir): # #----------------------------------------------------------------------- # - try: - fill_jinja_template(["-q", "-u", settings_str, "-t", NEMS_CONFIG_TMPL_FP, "-o", nems_config_fp]) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script fill_jinja_template.py to create the nems.configure - file from a jinja2 template failed. Parameters passed to this script are: - Full path to template nems.configure file: - NEMS_CONFIG_TMPL_FP = \"{NEMS_CONFIG_TMPL_FP}\" - Full path to output nems.configure file: - nems_config_fp = \"{nems_config_fp}\" - Namelist settings specified on command line:\n - settings =\n\n""" + # Store the settings in a temporary file + with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="nems_config_settings") as tmpfile: + tmpfile.write(settings_str) + + try: + set_templater(["-q", "-c", $tmpfile, "-i", NEMS_CONFIG_TMPL_FP, "-o", nems_config_fp]) + except: + print_err_msg_exit( + dedent( + f""" + Call to uwtools set_templater to create the nems.configure + file from a jinja2 template failed. 
Parameters passed to this script are: + Full path to template nems.configure file: + NEMS_CONFIG_TMPL_FP = \"{NEMS_CONFIG_TMPL_FP}\" + Full path to output nems.configure file: + nems_config_fp = \"{nems_config_fp}\" + Full path to configuration file: + {tmpfile} + + """ + ) ) - + settings_str - ) - return False + return False return True diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 3e3d689dc7..30828a3e40 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -35,7 +35,7 @@ from setup import setup from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames from get_crontab_contents import add_crontab_line -from fill_jinja_template import fill_jinja_template +from templater import set_template from set_namelist import set_namelist from check_python_version import check_python_version @@ -115,18 +115,19 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] args = ["-o", wflow_xml_fp, - "-t", template_xml_fp, - "-c", rocoto_yaml_fp ] + "-i", template_xml_fp, + "-c", rocoto_yaml_fp, + "-d"] if not debug: args.append("-q") try: - fill_jinja_template(args) + set_template(args) except: raise Exception( dedent( f""" - Call to fill_jinja_template failed. + Call to uwtools set_template failed. """ ) ) From 62b6595e2d6b25bf95a3f4f93d03400d09027b7f Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 3 May 2023 00:05:38 +0000 Subject: [PATCH 02/38] Removing fill_jinja_template.py --- ush/fill_jinja_template.py | 302 ------------------------------------- 1 file changed, 302 deletions(-) delete mode 100755 ush/fill_jinja_template.py diff --git a/ush/fill_jinja_template.py b/ush/fill_jinja_template.py deleted file mode 100755 index f810136753..0000000000 --- a/ush/fill_jinja_template.py +++ /dev/null @@ -1,302 +0,0 @@ -#!/usr/bin/env python3 - -""" -This utility fills in a user-supplied Jinja template from either a YAML file, or -command line arguments. - -The user configuration file and commandline arguments should be YAML-formatted. -This script will support a single- or two-level YAML config file. For example: - - 1. expt1: - date_first_cycl: !datetime 2019043000 - date_last_cycl: !datetime 2019050100 - cycl_freq: !!str 12:00:00 - - expt2: - date_first_cycl: !datetime 2019061012 - date_last_cycl: !datetime 2019061212 - cycl_freq: !!str 12:00:00 - - 2. date_first_cycl: !datetime 2019043000 - date_last_cycl: !datetime 2019050100 - cycl_freq: !!str 12:00:00 - - In Case 1, provide the name of the file and the section title, e.g. expt2, - to the -c command line argument. Only provide the name of the file in -c - option if it's configured as in Case 2 above. - - -Supported YAML Tags: - - The script supports additional YAML configuration tags. - - !datetime Converts an input string formatted as YYYYMMDDHH[mm[ss]] to a - Python datetime object - !join Uses os.path.join to join a list as a path. - -Expected behavior: - - - The template file is required. Script fails if not provided. - - Command line arguments in the -u setting override the -c settings. 
- -""" - -import datetime as dt -import os -import sys - -import argparse -import jinja2 as j2 -from jinja2 import meta -import yaml - - -def join(loader, node): - - """Uses os to join a list as a path.""" - - return os.path.join(*loader.construct_sequence(node)) - - -def to_datetime(loader, node): - - """Converts a date string with format YYYYMMDDHH[MM[SS]] to a datetime - object.""" - - value = loader.construct_scalar(node) - val_len = len(value) - - # Check that the input string contains only numbers and is expected length. - if val_len not in [10, 12, 14] or not value.isnumeric(): - msg = f"{value} does not conform to input format YYYYMMDDHH[MM[SS]]" - raise ValueError(msg) - - # Use a subset of the string corresponding to the input length of the string - # 2 chosen here since Y is a 4 char year. - date_format = "%Y%m%d%H%M%S"[0 : val_len - 2] - - return dt.datetime.strptime(value, date_format) - - -yaml.add_constructor("!datetime", to_datetime, Loader=yaml.SafeLoader) -yaml.add_constructor("!join", join, Loader=yaml.SafeLoader) - - -def file_exists(arg): - - """Checks whether a file exists, and returns the path if it does.""" - - if not os.path.exists(arg): - msg = f"{arg} does not exist!" - raise argparse.ArgumentTypeError(msg) - - return arg - - -def config_exists(arg): - - """ - Checks whether the config file exists and if it contains the input - section. Returns the config as a Python dict. - """ - - if len(arg) > 2: - msg = f"{len(arg)} arguments were provided for config. Only 2 allowed!" - raise argparse.ArgumentTypeError(msg) - - file_name = file_exists(arg[0]) - section_name = arg[1] if len(arg) == 2 else None - - # Load the YAML file into a dictionary - with open(file_name, "r") as fn: - cfg = yaml.load(fn, Loader=yaml.SafeLoader) - - if section_name: - try: - cfg = cfg[section_name] - except KeyError: - msg = f"Section {section_name} does not exist in top level of {file_name}" - raise argparse.ArgumentTypeError(msg) - - return cfg - - -def load_config(arg): - - """ - Check to ensure that the provided config file exists. If it does, load it - with YAML's safe loader and return the resulting dict. - """ - - # Check for existence of file - if not os.path.exists(arg): - msg = f"{arg} does not exist!" - raise argparse.ArgumentTypeError(msg) - - return yaml.safe_load(arg) - - -def load_str(arg): - - """Load a dict string safely using YAML. Return the resulting dict.""" - - return yaml.load(arg, Loader=yaml.SafeLoader) - - -def path_ok(arg): - - """ - Check whether the path to the file exists, and is writeable. Return the path - if it passes all checks, otherwise raise an error. - """ - - # Get the absolute path provided by arg - dir_name = os.path.abspath(os.path.dirname(arg)) - - # Ensure the arg path exists, and is writable. Raise error if not. - if os.path.lexists(dir_name) and os.access(dir_name, os.W_OK): - return arg - - msg = f"{arg} is not a writable path!" - raise argparse.ArgumentTypeError(msg) - - -def parse_args(argv): - - """ - Function maintains the arguments accepted by this script. Please see - Python's argparse documenation for more information about settings of each - argument. 
- """ - - parser = argparse.ArgumentParser(description="Fill in a Rocoto XML template.") - - # Optional - parser.add_argument( - "-c", - "--config", - help="Full path to a YAML user config file, and a \ - top-level section to use (optional).", - nargs="*", - type=load_config, - ) - parser.add_argument( - "-q", - "--quiet", - action="store_true", - help="Suppress all output", - ) - parser.add_argument( - "-u", - "--user_config", - help="Command-line user config options in YAML-formatted string", - type=load_str, - ) - # Required - parser.add_argument( - "-t", - "--xml_template", - dest="template", - help="Full path to the jinja template", - required=True, - type=file_exists, - ) - parser.add_argument( - "-o", - "--outxml", - dest="outxml", - help="Full path to the output Rocoto XML file.", - required=True, - type=path_ok, - ) - return parser.parse_args(argv) - - -def update_dict(dest, newdict, quiet=False): - - """ - Overwrites all values in dest dictionary section with key/value pairs from - newdict. Does not support multi-layer update. - - Turn off print statements with quiet=True. - - Input: - dest A dict that is to be updated. - newdict A dict containing sections and keys corresponding to - those in dest and potentially additional ones, that will be used to - update the dest dict. - quiet An optional boolean flag to turn off output. - Output: - None - Result: - The dest dict is updated in place. - """ - - if not quiet: - print("*" * 50) - - for key, value in newdict.items(): - if not quiet: - print(f"Overriding {key:>20} = {value}") - - # Set key in dict - dest[key] = value - - if not quiet: - print("*" * 50) - - -def fill_jinja_template(argv, config_dict=None): - - """ - Loads a Jinja template, determines its necessary undefined variables, - retrives them from user supplied settings, and renders the final result. - """ - - # parse args - cla = parse_args(argv) - if cla.config: - cla.config = config_exists(cla.config) - - # Create a Jinja Environment to load the template. - env = j2.Environment(loader=j2.FileSystemLoader(cla.template, - encoding='utf-8')) - template_source = env.loader.get_source(env, "") - template = env.get_template("") - parsed_content = env.parse(template_source) - - # Gather all of the undefined variables in the template. - template_vars = meta.find_undeclared_variables(parsed_content) - - # Read in the config options from the provided (optional) YAML file - cfg = cla.config if cla.config is not None else {} - - if config_dict is not None: - update_dict(cfg, config_dict, quiet=cla.quiet) - - # Update cfg with (optional) command-line entries, overriding those in YAML file - if cla.user_config: - update_dict(cfg, cla.user_config, quiet=cla.quiet) - - # Loop through all the undefined Jinja template variables, and grab the - # required values from the config file. - tvars = {} - for var in template_vars: - - if cfg.get(var, "NULL") == "NULL": - raise KeyError(f"{var} does not exist in user-supplied settings!") - - if not cla.quiet: - print(f"{var:>25}: {cfg.get(var)}") - - tvars[var] = cfg.get(var) - - # Fill in XML template - xml_contents = template.render(**tvars) - with open(cla.outxml, "w") as fn: - fn.write(xml_contents) - - -if __name__ == "__main__": - - fill_jinja_template(sys.argv[1:]) From 4d052b1ce37e87d91236c4525fe77588ba2cf2a8 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 3 May 2023 02:19:04 +0000 Subject: [PATCH 03/38] Changes in place. Failing on env. Needs modulefile fix. 
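
The uwtools checkout lives under ush/python_utils/uwtools (see the Externals.cfg
entry added in the first commit) rather than being installed into the conda
environment, so the workflow modulefile now prepends that checkout and its src/
directory to PYTHONPATH, and the workflow environment switches from
regional_workflow to workflow_tools. A quick way to confirm the wiring after
loading the workflow environment is something like the following (illustrative
only; the module and environment names depend on the platform, and the
modulefiles path below is a placeholder):

    module use /path/to/ufs-srweather-app/modulefiles
    module load wflow_hera
    conda activate workflow_tools
    python3 -c "from scripts.templater import set_template; print(set_template)"
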
--- modulefiles/wflow_hera.lua | 8 ++++++++ ush/create_aqm_rc_file.py | 2 +- ush/create_diag_table_file.py | 2 +- ush/create_model_configure_file.py | 2 +- ush/create_nems_configure_file.py | 2 +- ush/generate_FV3LAM_wflow.py | 4 ++-- ush/load_modules_wflow.sh | 2 +- 7 files changed, 15 insertions(+), 7 deletions(-) diff --git a/modulefiles/wflow_hera.lua b/modulefiles/wflow_hera.lua index efca665dd2..56b872870e 100644 --- a/modulefiles/wflow_hera.lua +++ b/modulefiles/wflow_hera.lua @@ -7,6 +7,14 @@ whatis([===[Loads libraries needed for running the UFS SRW App on Hera ]===]) load("rocoto") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "/../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "/../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) + prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 4aea1551f5..a4339dab71 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -21,7 +21,7 @@ flatten_dict ) -from templater import set_template +from scripts.templater import set_template def create_aqm_rc_file(cdate, run_dir, init_concentrations): """ Creates an aqm.rc file in the specified run directory diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index d5b3a04cf2..901001689e 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -18,7 +18,7 @@ flatten_dict, ) -from templater import set_template +from scripts.templater import set_template def create_diag_table_file(run_dir): diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 18b9648bae..161e1d6ec3 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -21,7 +21,7 @@ flatten_dict, ) -from templater import set_template +from scripts.templater import set_template def create_model_configure_file( diff --git a/ush/create_nems_configure_file.py b/ush/create_nems_configure_file.py index 1267eb084d..8f04edad8f 100644 --- a/ush/create_nems_configure_file.py +++ b/ush/create_nems_configure_file.py @@ -21,7 +21,7 @@ flatten_dict, ) -from templater import set_template +from scripts.templater import set_template def create_nems_configure_file(run_dir): """ Creates a nems configuration file in the specified diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 30828a3e40..19dc49151b 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -35,7 +35,7 @@ from setup import setup from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames from get_crontab_contents import add_crontab_line -from templater import set_template +from scripts.templater import set_template from set_namelist import set_namelist from check_python_version import check_python_version @@ -117,7 +117,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de args = ["-o", wflow_xml_fp, "-i", template_xml_fp, "-c", rocoto_yaml_fp, - "-d"] + ] if not debug: args.append("-q") diff --git a/ush/load_modules_wflow.sh b/ush/load_modules_wflow.sh index 5e7e30e3a7..7631295d76 100755 --- a/ush/load_modules_wflow.sh +++ b/ush/load_modules_wflow.sh @@ -62,7 +62,7 @@ task failed: $has_mu && set +u if [ ! 
-z $(command -v conda) ]; then - conda activate regional_workflow + conda activate workflow_tools fi $has_mu && set -u From 7fdb1176b5e4d099dc5150da5d738b35e5376550 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 8 May 2023 15:30:08 +0000 Subject: [PATCH 04/38] Add pythonpath for uwtools package items. --- modulefiles/tasks/cheyenne/python_srw.lua | 7 +++++++ modulefiles/tasks/gaea/python_srw.lua | 7 +++++++ modulefiles/tasks/hera/python_srw.lua | 7 +++++++ modulefiles/tasks/jet/python_srw.lua | 7 +++++++ modulefiles/tasks/noaacloud/python_srw.lua | 7 +++++++ modulefiles/tasks/orion/python_srw.lua | 7 +++++++ modulefiles/tasks/wcoss2/python_srw.lua | 7 +++++++ 7 files changed, 49 insertions(+) diff --git a/modulefiles/tasks/cheyenne/python_srw.lua b/modulefiles/tasks/cheyenne/python_srw.lua index 2263141a0a..3f7548609d 100644 --- a/modulefiles/tasks/cheyenne/python_srw.lua +++ b/modulefiles/tasks/cheyenne/python_srw.lua @@ -3,3 +3,10 @@ prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles" load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua index 74336ca1cf..d3cd294aed 100644 --- a/modulefiles/tasks/gaea/python_srw.lua +++ b/modulefiles/tasks/gaea/python_srw.lua @@ -2,3 +2,10 @@ prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/hera/python_srw.lua b/modulefiles/tasks/hera/python_srw.lua index 7934169824..fb412d10ce 100644 --- a/modulefiles/tasks/hera/python_srw.lua +++ b/modulefiles/tasks/hera/python_srw.lua @@ -2,3 +2,10 @@ prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefil load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/jet/python_srw.lua b/modulefiles/tasks/jet/python_srw.lua index ef4f248966..d10b86a6b8 100644 --- a/modulefiles/tasks/jet/python_srw.lua +++ b/modulefiles/tasks/jet/python_srw.lua @@ -2,3 +2,10 @@ prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefil load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, 
"../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua index 602d60842f..7c972640c2 100644 --- a/modulefiles/tasks/noaacloud/python_srw.lua +++ b/modulefiles/tasks/noaacloud/python_srw.lua @@ -1 +1,8 @@ prepend_path("PATH", "/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua index 5b7b0afc57..96a80e9bfc 100644 --- a/modulefiles/tasks/orion/python_srw.lua +++ b/modulefiles/tasks/orion/python_srw.lua @@ -3,3 +3,10 @@ append_path("MODULEPATH","/work/noaa/epic-ps/role-epic-ps/miniconda3/modulefiles load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/wcoss2/python_srw.lua b/modulefiles/tasks/wcoss2/python_srw.lua index 519f1cdf4a..55b02d0a6c 100644 --- a/modulefiles/tasks/wcoss2/python_srw.lua +++ b/modulefiles/tasks/wcoss2/python_srw.lua @@ -1,3 +1,10 @@ load(pathJoin("intel", os.getenv("intel_ver"))) load(pathJoin("python", os.getenv("python_ver"))) load(pathJoin("prod_util", os.getenv("prod_util_ver"))) + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) From 1394c57514b7be29b245a742c45d5194de1f74b1 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 8 May 2023 18:22:41 +0000 Subject: [PATCH 05/38] Temp files for all calls to set_template. All files need YAML suffixes. Call to templater CLI needs full path. 
--- scripts/exregional_run_met_pb2nc_obs.sh | 10 +++--- scripts/exregional_run_met_pcpcombine.sh | 11 +++--- ush/create_diag_table_file.py | 28 ++++----------- ush/create_model_configure_file.py | 45 +++++++++--------------- ush/create_nems_configure_file.py | 27 ++++---------- ush/generate_FV3LAM_wflow.py | 14 +------- 6 files changed, 40 insertions(+), 95 deletions(-) diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index ef2306f031..2ad15b0a6d 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -257,7 +257,7 @@ settings="\ 'field_thresholds': '${FIELD_THRESHOLDS:-}' " # Store the settings in a temporary file -tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX)") +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") cat > $tmpfile << EOF $settings EOF @@ -265,10 +265,10 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -$USHdir/templater.py -q \ - -i ${metplus_config_tmpl_fp} \ - -c ${tmpfile} \ - -o ${metplus_config_fp} || \ +python3 $USHdir/python_utils/uwtools/scripts/templater.py \ + -i ${metplus_config_tmpl_fp} \ + -c ${tmpfile} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ Call to uwtools templater.py to generate a METplus configuration file from a jinja template failed. Parameters passed diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 376bad0925..5f3c73b651 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -313,7 +313,7 @@ settings="\ 'field_thresholds': '${FIELD_THRESHOLDS:-}' " # Store the settings in a temporary file -tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX)") +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") cat > $tmpfile << EOF $settings EOF @@ -322,10 +322,11 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -python3 templater.py -q \ - -i ${metplus_config_tmpl_fp} \ - -c ${tmpfile} \ - -o ${metplus_config_fp} || \ +python3 $USHdir/python_utils/uwtools/scripts/templater.py \ + -q \ + -i ${metplus_config_tmpl_fp} \ + -c ${tmpfile} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ Call to uwtools templater.py to generate a METplus configuration file from a jinja template failed. Parameters passed diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 901001689e..4d3f7fef8b 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -73,29 +73,13 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings") as tmpfile: + with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings", suffix=".yaml") as tmpfile: tmpfile.write(settings_str) - try: - set_template( - ["-q", "-c", tmfile, "-i", DIAG_TABLE_TMPL_FP, "-o", diag_table_fp] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to uwtools set_template to create a '{DIAG_TABLE_FN}' - file from a jinja2 template failed. 
Parameters passed to this script are: - Full path to template diag table file: - DIAG_TABLE_TMPL_FP = '{DIAG_TABLE_TMPL_FP}' - Full path to output diag table file: - diag_table_fp = '{diag_table_fp}' - Full path to configuration file: - {tmpfile} - """ - ) - + settings_str - ) - return False + tmpfile.seek(0) + # set_template does its own error handling + set_template( + ["-c", tmpfile.name, "-i", DIAG_TABLE_TMPL_FP, "-o", diag_table_fp] + ) return True diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 161e1d6ec3..bb70b9ee16 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -205,36 +205,23 @@ def create_model_configure_file( # model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN) - with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="model_config_settings") as tmpfile: + with tempfile.NamedTemporaryFile(dir="./", + mode="w+t", + suffix=".yaml", + prefix="model_config_settings.") as tmpfile: tmpfile.write(settings_str) - try: - set_template( - [ - "-q", - "-c", - tmpfile, - "-i", - MODEL_CONFIG_TMPL_FP, - "-o", - model_config_fp, - ] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to uwtools set_template to create a '{MODEL_CONFIG_FN}' - file from a jinja2 template failed. Parameters passed to this script are: - Full path to template model config file: - MODEL_CONFIG_TMPL_FP = '{MODEL_CONFIG_TMPL_FP}' - Full path to output model config file: - model_config_fp = '{model_config_fp}' - Full path to configuration file: - {tmpfile} - """ - ) - ) - return False + tmpfile.seek(0) + # set_template does its own error handling + set_template( + [ + "-c", + tmpfile.name, + "-i", + MODEL_CONFIG_TMPL_FP, + "-o", + model_config_fp, + ] + ) return True diff --git a/ush/create_nems_configure_file.py b/ush/create_nems_configure_file.py index 8f04edad8f..5cb3455ac4 100644 --- a/ush/create_nems_configure_file.py +++ b/ush/create_nems_configure_file.py @@ -91,29 +91,14 @@ def create_nems_configure_file(run_dir): #----------------------------------------------------------------------- # # Store the settings in a temporary file - with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="nems_config_settings") as tmpfile: + with tempfile.NamedTemporaryFile(dir="./", + mode="w+t", + prefix="nems_config_settings", + suffix=".yaml") as tmpfile: tmpfile.write(settings_str) + tmpfile.seek(0) - try: - set_templater(["-q", "-c", $tmpfile, "-i", NEMS_CONFIG_TMPL_FP, "-o", nems_config_fp]) - except: - print_err_msg_exit( - dedent( - f""" - Call to uwtools set_templater to create the nems.configure - file from a jinja2 template failed. 
Parameters passed to this script are: - Full path to template nems.configure file: - NEMS_CONFIG_TMPL_FP = \"{NEMS_CONFIG_TMPL_FP}\" - Full path to output nems.configure file: - nems_config_fp = \"{nems_config_fp}\" - Full path to configuration file: - {tmpfile} - - """ - ) - ) - return False - + set_template(["-c", tmpfile.name, "-i", NEMS_CONFIG_TMPL_FP, "-o", nems_config_fp]) return True def parse_args(argv): diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 19dc49151b..fa82c3a0aa 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -118,19 +118,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de "-i", template_xml_fp, "-c", rocoto_yaml_fp, ] - if not debug: - args.append("-q") - - try: - set_template(args) - except: - raise Exception( - dedent( - f""" - Call to uwtools set_template failed. - """ - ) - ) + set_template(args) # # ----------------------------------------------------------------------- # From d2d6900204b74e88a4bd4a8d457792a0e564511c Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 8 May 2023 20:56:12 +0000 Subject: [PATCH 06/38] Linting the small modified python utilities. --- .github/workflows/python_linter.yaml | 1 + ush/create_aqm_rc_file.py | 73 +++++++++++++--------------- ush/create_diag_table_file.py | 13 +++-- ush/create_model_configure_file.py | 39 ++++++--------- ush/create_nems_configure_file.py | 32 ++++++------ 5 files changed, 76 insertions(+), 82 deletions(-) diff --git a/.github/workflows/python_linter.yaml b/.github/workflows/python_linter.yaml index 7b7cfed85b..e58562608b 100644 --- a/.github/workflows/python_linter.yaml +++ b/.github/workflows/python_linter.yaml @@ -35,3 +35,4 @@ jobs: run: | export PYTHONPATH=${PWD}/ush pylint --ignore-imports=yes tests/test_python/ + pylint --min-similarity-lines 15 ush/create_*.py diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index c94e9da894..50db26c0b1 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -1,20 +1,19 @@ #!/usr/bin/env python3 +""" +Function that creates the config file for running AQM. +""" import os import sys import argparse -from datetime import datetime from textwrap import dedent import tempfile from python_utils import ( - import_vars, - set_env_var, - print_input_args, + import_vars, + print_input_args, str_to_type, - print_info_msg, - print_err_msg_exit, - lowercase, + print_info_msg, cfg_to_yaml_str, load_shell_config, flatten_dict @@ -37,7 +36,8 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): #import all environment variables import_vars() - + #pylint: disable=undefined-variable + # #----------------------------------------------------------------------- # @@ -57,23 +57,37 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # Extract from cdate the starting year, month, and day of the forecast. # yyyymmdd=cdate.strftime('%Y%m%d') - mm=f"{cdate.month:02d}" - hh=f"{cdate.hour:02d}" + mm=f"{cdate.month:02d}" # pylint: disable=invalid-name + hh=f"{cdate.hour:02d}" # pylint: disable=invalid-name # # Set parameters in the aqm.rc file. 
# aqm_rc_bio_file_fp=os.path.join(AQM_BIO_DIR, AQM_BIO_FILE) - aqm_fire_file_fn=AQM_FIRE_FILE_PREFIX+"_"+yyyymmdd+"_t"+hh+"z"+AQM_FIRE_FILE_SUFFIX - aqm_rc_fire_file_fp=os.path.join(COMINext, "FIRE_EMISSION", aqm_fire_file_fn) - aqm_dust_file_fn=AQM_DUST_FILE_PREFIX+"_"+PREDEF_GRID_NAME+AQM_DUST_FILE_SUFFIX - aqm_rc_dust_file_fp=os.path.join(AQM_DUST_DIR, aqm_dust_file_fn) - aqm_canopy_file_fn=AQM_CANOPY_FILE_PREFIX+"."+mm+AQM_CANOPY_FILE_SUFFIX - aqm_rc_canopy_file_fp=os.path.join(AQM_CANOPY_DIR, PREDEF_GRID_NAME, aqm_canopy_file_fn) + + # Fire config + aqm_rc_fire_file_fp=os.path.join( + COMINext, + "FIRE_EMISSION", + f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" + ) + + # Dust config + aqm_rc_dust_file_fp=os.path.join( + AQM_DUST_DIR, + f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", + ) + + # Canopy config + aqm_rc_canopy_file_fp=os.path.join( + AQM_CANOPY_DIR, + PREDEF_GRID_NAME, + f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", + ) # #----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the jinja variables in the template + # specifying the values that the jinja variables in the template # AQM_RC_TMPL_FN file should be set to. # #----------------------------------------------------------------------- @@ -95,14 +109,14 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY } settings_str = cfg_to_yaml_str(settings) - + print_info_msg( dedent( f""" The variable \"settings\" specifying values to be used in the \"{AQM_RC_FN}\" file has been set as follows:\n settings =\n\n""" - ) + ) + settings_str, verbose=VERBOSE, ) @@ -116,7 +130,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings") as tmpfile: tmpfile.write(settings_str) - try: + tmpfile.seek(0) set_template( [ "-q", @@ -128,22 +142,6 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): aqm_rc_fp, ] ) - except: - print_err_msg_exit( - dedent( - f""" - Call to uwtools set_template to create a \"{AQM_RC_FN}\" - file from a jinja2 template failed. Parameters passed to this script are: - Full path to template aqm.rc file: - AQM_RC_TMPL_FP = \"{AQM_RC_TMPL_FP}\" - Full path to output aqm.rc file: - aqm_rc_fp = \"{aqm_rc_fp}\" - Full path to configuration file: - {tmpfile} - """ - ) - return False - return True def parse_args(argv): @@ -180,6 +178,5 @@ def parse_args(argv): create_aqm_rc_file( run_dir=args.run_dir, cdate=str_to_type(args.cdate), - init_concentrations=str_to_type(args.init_concentrations), + init_concentrations=str_to_type(args.init_concentrations), ) - diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 4b5fc98156..8df137262e 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -1,17 +1,20 @@ #!/usr/bin/env python3 +""" +Function to create a diag_table file for the FV3 model using a +template. 
+""" import os import sys import argparse from textwrap import dedent import tempfile + from python_utils import ( import_vars, - set_env_var, print_input_args, print_info_msg, - print_err_msg_exit, cfg_to_yaml_str, load_shell_config, flatten_dict, @@ -34,6 +37,7 @@ def create_diag_table_file(run_dir): # import all environment variables import_vars() + #pylint: disable=undefined-variable # create a diagnostic table file within the specified run directory print_info_msg( f""" @@ -72,7 +76,10 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings", suffix=".yaml") as tmpfile: + with tempfile.NamedTemporaryFile(dir="./", + mode="w+t", + prefix="aqm_rc_settings", + suffix=".yaml") as tmpfile: tmpfile.write(settings_str) tmpfile.seek(0) # set_template does its own error handling diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 4299605565..b8ef483cb4 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -1,19 +1,19 @@ #!/usr/bin/env python3 - +""" +Create a model_configure file for the FV3 forecast model from a +template. +""" import os import sys import argparse -from datetime import datetime from textwrap import dedent import tempfile from python_utils import ( import_vars, - set_env_var, print_input_args, str_to_type, print_info_msg, - print_err_msg_exit, lowercase, cfg_to_yaml_str, load_shell_config, @@ -25,7 +25,7 @@ def create_model_configure_file( cdate, fcst_len_hrs, fhrot, run_dir, sub_hourly_post, dt_subhourly_post_mnts, dt_atmos -): + ): #pylint: disable=too-many-arguments """Creates a model configuration file in the specified run directory @@ -46,6 +46,8 @@ def create_model_configure_file( # import all environment variables import_vars() + # pylint: disable=undefined-variable + # # ----------------------------------------------------------------------- # @@ -61,18 +63,6 @@ def create_model_configure_file( verbose=VERBOSE, ) # - # Extract from cdate the starting year, month, day, and hour of the forecast. - # - yyyy = cdate.year - mm = cdate.month - dd = cdate.day - hh = cdate.hour - # - # Set parameters in the model configure file. - # - dot_quilting_dot=f".{lowercase(str(QUILTING))}." - dot_write_dopost=f".{lowercase(str(WRITE_DOPOST))}." 
- # # ----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string @@ -82,16 +72,16 @@ def create_model_configure_file( # ----------------------------------------------------------------------- # settings = { - "start_year": yyyy, - "start_month": mm, - "start_day": dd, - "start_hour": hh, + "start_year": cdate.year, + "start_month": cdate.month, + "start_day": cdate.day, + "start_hour": cdate.hour, "nhours_fcst": fcst_len_hrs, "fhrot": fhrot, "dt_atmos": DT_ATMOS, "restart_interval": RESTART_INTERVAL, - "write_dopost": dot_write_dopost, - "quilting": dot_quilting_dot, + "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", + "quilting": f".{lowercase(str(WRITE_DOPOST))}.", "output_grid": WRTCMP_output_grid, } # @@ -127,8 +117,7 @@ def create_model_configure_file( } ) elif ( - WRTCMP_output_grid == "regional_latlon" - or WRTCMP_output_grid == "rotated_latlon" + WRTCMP_output_grid in ("regional_latlon", "rotated_latlon") ): settings.update( { diff --git a/ush/create_nems_configure_file.py b/ush/create_nems_configure_file.py index 166781711f..fc447a9bb2 100644 --- a/ush/create_nems_configure_file.py +++ b/ush/create_nems_configure_file.py @@ -1,20 +1,20 @@ #!/usr/bin/env python3 +""" +Function to create a NEMS configuration file for the FV3 forecast +model(s) from a template. +""" + import os import sys import argparse -from datetime import datetime import tempfile from textwrap import dedent from python_utils import ( - import_vars, - set_env_var, - print_input_args, - str_to_type, - print_info_msg, - print_err_msg_exit, - lowercase, + import_vars, + print_input_args, + print_info_msg, cfg_to_yaml_str, load_shell_config, flatten_dict, @@ -36,7 +36,9 @@ def create_nems_configure_file(run_dir): #import all environment variables import_vars() - + + # pylint: disable=undefined-variable + # #----------------------------------------------------------------------- # @@ -57,7 +59,7 @@ def create_nems_configure_file(run_dir): #----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the jinja variables in the template + # specifying the values that the jinja variables in the template # model_configure file should be set to. # #----------------------------------------------------------------------- @@ -70,15 +72,15 @@ def create_nems_configure_file(run_dir): "atm_omp_num_threads": OMP_NUM_THREADS_RUN_FCST, } settings_str = cfg_to_yaml_str(settings) - + print_info_msg( dedent( f""" The variable \"settings\" specifying values to be used in the \"{NEMS_CONFIG_FN}\" file has been set as follows:\n settings =\n\n""" - ) - + settings_str, + ) + + settings_str, verbose=VERBOSE, ) # @@ -124,7 +126,5 @@ def parse_args(argv): cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_nems_configure_file( - run_dir=args.run_dir, + run_dir=args.run_dir, ) - - From c35337d9e4172b046d84f6681dc117fc8447511d Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 8 May 2023 21:42:34 +0000 Subject: [PATCH 07/38] Linting workflow generation script. 
--- .github/workflows/python_linter.yaml | 3 +- .pylintrc | 623 +++++++++++++++++++++++++++ ush/generate_FV3LAM_wflow.py | 142 +++--- 3 files changed, 697 insertions(+), 71 deletions(-) create mode 100644 .pylintrc diff --git a/.github/workflows/python_linter.yaml b/.github/workflows/python_linter.yaml index e58562608b..647105db12 100644 --- a/.github/workflows/python_linter.yaml +++ b/.github/workflows/python_linter.yaml @@ -35,4 +35,5 @@ jobs: run: | export PYTHONPATH=${PWD}/ush pylint --ignore-imports=yes tests/test_python/ - pylint --min-similarity-lines 15 ush/create_*.py + pylint ush/create_*.py + pylint ush/generate_FV3LAM_wflow.py diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000000..a4bfcd4b2f --- /dev/null +++ b/.pylintrc @@ -0,0 +1,623 @@ +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). 
It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.9 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. 
+function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. 
+max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). 
You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=15 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the 'python-enchant' package. 
+spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. 
+allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 0652d16179..f7616f9348 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -1,45 +1,43 @@ #!/usr/bin/env python3 +""" +User interface to create an experiment directory consistent with the +user-defined config.yaml file. +""" + +# pylint: disable=invalid-name + import os -import sys -import subprocess import logging -from multiprocessing import Process from textwrap import dedent -from datetime import datetime, timedelta from python_utils import ( log_info, import_vars, export_vars, - load_config_file, - update_dict, cp_vrfy, ln_vrfy, mkdir_vrfy, mv_vrfy, - run_command, - date_to_str, - define_macos_utilities, create_symlink_to_file, check_for_preexist_dir_file, cfg_to_yaml_str, find_pattern_in_str, - set_env_var, - get_env_var, - lowercase, flatten_dict, ) from setup import setup from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames from get_crontab_contents import add_crontab_line -from scripts.templater import set_template from set_namelist import set_namelist from check_python_version import check_python_version +from scripts.templater import set_template - -def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> str: +# pylint: disable=too-many-locals,too-many-branches, too-many-statements +def generate_FV3LAM_wflow( + ushdir, + logfile: str = "log.generate_FV3LAM_wflow", + debug: bool = False) -> str: """Function to setup a forecast experiment and create a workflow (according to the parameters specified in the config file) @@ -101,7 +99,6 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de expt_config["user"]["PARMdir"], wflow_xml_fn, ) - global_var_defns_fp = expt_config["workflow"]["GLOBAL_VAR_DEFNS_FP"] log_info( f""" @@ -157,6 +154,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de import_vars(dictionary=flatten_dict(expt_config)) export_vars(source_dict=flatten_dict(expt_config)) + # pylint: disable=undefined-variable if USE_CRON_TO_RELAUNCH: add_crontab_line() @@ -227,20 +225,20 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # ----------------------------------------------------------------------- # log_info( - f""" + """ Copying templates of various input files to the experiment directory...""", verbose=verbose, ) log_info( - f""" + """ Copying the template data table file to the experiment directory...""", verbose=verbose, ) cp_vrfy(DATA_TABLE_TMPL_FP, DATA_TABLE_FP) log_info( - f""" + """ Copying the template field table file to the experiment directory...""", verbose=verbose, ) @@ -252,7 +250,7 @@ def 
generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # DIR). # log_info( - f""" + """ Copying the CCPP physics suite definition XML file from its location in the forecast model directory structure to the experiment directory...""", verbose=verbose, @@ -264,9 +262,10 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # DIR). # log_info( - f""" - Copying the field dictionary file from its location in the forecast - model directory structure to the experiment directory...""", + """ + Copying the field dictionary file from its location in the + forecast model directory structure to the experiment + directory...""", verbose=verbose, ) cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) @@ -284,7 +283,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de ) # # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. - # These need to be set in the FV3-LAM Fortran namelist file. They represent + # These need to be set in the FV3-LAM Fortran namelist file. They represen # the number of cell vertices in the x and y directions on the regional # grid. # @@ -292,7 +291,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de npy = NY + 1 # # For the physics suites that use RUC LSM, set the parameter kice to 9, - # Otherwise, leave it unspecified (which means it gets set to the default + # Otherwise, leave it unspecified (which means it gets set to the defaul # value in the forecast model). # kice = None @@ -304,7 +303,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # nsoill_out in the namelist file for chgres_cube. [On the other hand, # the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or # FV3.input.yml) is the number of soil levels that the LSM scheme in the - # forecast model will run with.] Here, we use the same approach to set + # forecast model will run with.] Here, we use the same approach to se # lsoil as the one used to set nsoill_out in exregional_make_ics.sh. # See that script for details. # @@ -313,9 +312,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # Also, may want to set lsm here as well depending on SDF_USES_RUC_LSM. 
# lsoil = 4 - if (EXTRN_MDL_NAME_ICS == "HRRR" or EXTRN_MDL_NAME_ICS == "RAP") and ( - SDF_USES_RUC_LSM - ): + if EXTRN_MDL_NAME_ICS in ("HRRR", "RAP") and SDF_USES_RUC_LSM: lsoil = 9 if CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km": lsoil = "" @@ -362,9 +359,10 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de "layout": [LAYOUT_X, LAYOUT_Y], "bc_update_interval": LBC_SPEC_INTVL_HRS, }) - if ( CCPP_PHYS_SUITE == "FV3_GFS_2017_gfdl_mp" or - CCPP_PHYS_SUITE == "FV3_GFS_2017_gfdlmp_regional" or - CCPP_PHYS_SUITE == "FV3_GFS_v15p2" ): + if CCPP_PHYS_SUITE in ("FV3_GFS_2017_gfdl_mp", + "FV3_GFS_2017_gfdlmp_regional", + "FV3_GFS_v15p2", + ): if CPL_AQM: fv_core_nml_dict.update({ "dnats": 5 @@ -373,7 +371,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de fv_core_nml_dict.update({ "dnats": 1 }) - elif CCPP_PHYS_SUITE == "FV3_GFS_v16": + elif CCPP_PHYS_SUITE == "FV3_GFS_v16": if CPL_AQM: fv_core_nml_dict.update({ "hord_tr": 8, @@ -394,7 +392,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de "dnats": 0 }) - settings["fv_core_nml"] = fv_core_nml_dict + settings["fv_core_nml"] = fv_core_nml_dic gfs_physics_nml_dict = {} gfs_physics_nml_dict.update({ @@ -411,31 +409,32 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de }) if CPL_AQM: gfs_physics_nml_dict.update({ - "cplaqm": True, + "cplaqm": True, "cplocn2atm": False, - "fscav_aero": ["aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", - "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", - "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", - "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", - "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", - "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", - "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", - "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", - "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", - "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", - "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", - "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", - "pcvoc:0.0", "pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", - "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", - "tolu:0.0", "vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", - "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", - "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] + "fscav_aero": [ + "aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", + "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", + "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", + "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", + "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", + "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", + "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", + "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", + "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", + "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", + "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", + "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", + "pcvoc:0.0", "pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", + "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", + "tolu:0.0", "vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", 
+ "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", + "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] }) - settings["gfs_physics_nml"] = gfs_physics_nml_dict + settings["gfs_physics_nml"] = gfs_physics_nml_dic # # Add to "settings" the values of those namelist variables that specify - # the paths to fixed files in the FIXam directory. As above, these namelist + # the paths to fixed files in the FIXam directory. As above, these namelis # variables are physcs-suite-independent. # # Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains @@ -461,14 +460,14 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de if FIXam_fn: fp = os.path.join(FIXam, FIXam_fn) # - # If not in NCO mode, for portability and brevity, change fp so that it + # If not in NCO mode, for portability and brevity, change fp so that i # is a relative path (relative to any cycle directory immediately under # the experiment directory). # if RUN_ENVIR != "nco": fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) # - # Add a line to the variable "settings" that specifies (in a yaml-compliant + # Add a line to the variable "settings" that specifies (in a yaml-complian # format) the name of the current namelist variable and the value it should # be set to. # @@ -476,7 +475,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # # Add namsfc_dict to settings # - settings["namsfc"] = namsfc_dict + settings["namsfc"] = namsfc_dic # # Use netCDF4 when running the North American 3-km domain due to file size. # @@ -533,7 +532,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de if DO_SPP or DO_LSM_SPP: nam_stochy_dict.update({"new_lscale": NEW_LSCALE}) - settings["nam_stochy"] = nam_stochy_dict + settings["nam_stochy"] = nam_stochy_dic # # Add the relevant SPP namelist variables to "settings" when running with # SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. @@ -573,7 +572,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de settings_str = cfg_to_yaml_str(settings) log_info( - f""" + """ The variable 'settings' specifying values of the weather model's namelist variables has been set as follows:\n""", verbose=verbose, @@ -584,7 +583,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # # Call the set_namelist.py script to create a new FV3 namelist file (full # path specified by FV3_NML_FP) using the file FV3_NML_BASE_SUITE_FP as - # the base (i.e. starting) namelist file, with physics-suite-dependent + # the base (i.e. starting) namelist file, with physics-suite-dependen # modifications to the base file specified in the yaml configuration file # FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), # and with additional physics-suite-independent modifications specified @@ -607,7 +606,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de FV3_NML_FP, ] ) - except: + except: # pylint: disable=bare-except logging.exception( dedent( f""" @@ -632,7 +631,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # the paths to surface climatology files. These files are located in # (or have symlinks that point to them) in the FIXlam directory. 
# - # Note that if running the TN_MAKE_GRID task, this action usually cannot + # Note that if running the TN_MAKE_GRID task, this action usually canno # be performed here but must be performed in that task because the names # of the surface climatology files depend on the CRES parameter (which is # the C-resolution of the grid), and this parameter is in most workflow @@ -668,6 +667,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de rocotorun_cmd = f"rocotorun -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" rocotostat_cmd = f"rocotostat -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" + # pylint: disable=line-too-long log_info( f""" To launch the workflow, change location to the experiment directory @@ -684,7 +684,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de Note that: 1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the next + task in the workflow in order for the workflow to submit the nex task(s) to the queue. 2) In order for the output of the rocotostat command to be up-to-date, @@ -698,8 +698,10 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de */{CRON_RELAUNCH_INTVL_MNTS} * * * * cd {EXPTDIR} && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE" """ ) + # pylint: enable=line-too-long - # If we got to this point everything was successful: move the log file to the experiment directory. + # If we got to this point everything was successful: move the log + # file to the experiment directory. mv_vrfy(logfile, EXPTDIR) return EXPTDIR @@ -709,7 +711,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals """ Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all messages with detailed timing and routine info in the specified text file. - + If debug = True, print all messages to both screen and log file. """ logging.getLogger().setLevel(logging.DEBUG) @@ -720,9 +722,10 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logging.getLogger().addHandler(fh) - logging.debug(f"Finished setting up debug file logging in {logfile}") + logging.debug("Finished setting up debug file logging in {logfile}") - # If there are already multiple handlers, that means generate_FV3LAM_workflow was called from another function. + # If there are already multiple handlers, that means + # generate_FV3LAM_workflow was called from another function. # In that case, do not change the console (print-to-screen) logging. if len(logging.getLogger().handlers) > 1: return @@ -759,7 +762,8 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals ) ) raise - + + # pylint: disable=undefined-variable # Note workflow generation completion log_info( f""" @@ -774,5 +778,3 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals ======================================================================== """ ) - - From 8d317e710020e17e3b60ecf48ea3b763f45e6bff Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 10 May 2023 14:13:35 +0000 Subject: [PATCH 08/38] Fix some silly typos. 
--- ush/generate_FV3LAM_wflow.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index f7616f9348..7a30a85e25 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -392,7 +392,7 @@ def generate_FV3LAM_wflow( "dnats": 0 }) - settings["fv_core_nml"] = fv_core_nml_dic + settings["fv_core_nml"] = fv_core_nml_dict gfs_physics_nml_dict = {} gfs_physics_nml_dict.update({ @@ -430,11 +430,11 @@ def generate_FV3LAM_wflow( "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] }) - settings["gfs_physics_nml"] = gfs_physics_nml_dic + settings["gfs_physics_nml"] = gfs_physics_nml_dict # # Add to "settings" the values of those namelist variables that specify - # the paths to fixed files in the FIXam directory. As above, these namelis + # the paths to fixed files in the FIXam directory. As above, these namelist # variables are physcs-suite-independent. # # Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains @@ -460,14 +460,14 @@ def generate_FV3LAM_wflow( if FIXam_fn: fp = os.path.join(FIXam, FIXam_fn) # - # If not in NCO mode, for portability and brevity, change fp so that i + # If not in NCO mode, for portability and brevity, change fp so that it # is a relative path (relative to any cycle directory immediately under # the experiment directory). # if RUN_ENVIR != "nco": fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) # - # Add a line to the variable "settings" that specifies (in a yaml-complian + # Add a line to the variable "settings" that specifies (in a yaml-compliant # format) the name of the current namelist variable and the value it should # be set to. # @@ -475,7 +475,7 @@ def generate_FV3LAM_wflow( # # Add namsfc_dict to settings # - settings["namsfc"] = namsfc_dic + settings["namsfc"] = namsfc_dict # # Use netCDF4 when running the North American 3-km domain due to file size. # @@ -532,7 +532,7 @@ def generate_FV3LAM_wflow( if DO_SPP or DO_LSM_SPP: nam_stochy_dict.update({"new_lscale": NEW_LSCALE}) - settings["nam_stochy"] = nam_stochy_dic + settings["nam_stochy"] = nam_stochy_dict # # Add the relevant SPP namelist variables to "settings" when running with # SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. @@ -583,7 +583,7 @@ def generate_FV3LAM_wflow( # # Call the set_namelist.py script to create a new FV3 namelist file (full # path specified by FV3_NML_FP) using the file FV3_NML_BASE_SUITE_FP as - # the base (i.e. starting) namelist file, with physics-suite-dependen + # the base (i.e. starting) namelist file, with physics-suite-dependent # modifications to the base file specified in the yaml configuration file # FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), # and with additional physics-suite-independent modifications specified @@ -631,7 +631,7 @@ def generate_FV3LAM_wflow( # the paths to surface climatology files. These files are located in # (or have symlinks that point to them) in the FIXlam directory. 
# - # Note that if running the TN_MAKE_GRID task, this action usually canno + # Note that if running the TN_MAKE_GRID task, this action usually cannot # be performed here but must be performed in that task because the names # of the surface climatology files depend on the CRES parameter (which is # the C-resolution of the grid), and this parameter is in most workflow @@ -684,7 +684,7 @@ def generate_FV3LAM_wflow( Note that: 1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the nex + task in the workflow in order for the workflow to submit the next task(s) to the queue. 2) In order for the output of the rocotostat command to be up-to-date, From d50496b1a3e996ee6091300f240251ab1a4d2038 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 11 May 2023 13:44:19 +0000 Subject: [PATCH 09/38] There it is! --- ush/create_model_configure_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index b8ef483cb4..f8df5d7274 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -81,7 +81,7 @@ def create_model_configure_file( "dt_atmos": DT_ATMOS, "restart_interval": RESTART_INTERVAL, "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", - "quilting": f".{lowercase(str(WRITE_DOPOST))}.", + "quilting": f".{lowercase(str(QUILTING))}.", "output_grid": WRTCMP_output_grid, } # From 03d2b9314b2bd26a1da9326c33665b4e7eb39e01 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 11 May 2023 18:47:34 +0000 Subject: [PATCH 10/38] Use dev branch for uwtools for now. --- Externals.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index a431652733..e7934d3d1d 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -66,8 +66,8 @@ required = True protocol = git repo_url = https://github.com/ufs-community/workflow-tools # Specify either a branch name or a hash but not both. -#branch = develop -hash = 04da24c +branch = srw_integration_changes +#hash = 04da24c local_path = ush/python_utils/uwtools required = True From 635065f5f4f265c4ac03df3fd913484823780ecb Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 11 May 2023 19:56:35 +0000 Subject: [PATCH 11/38] Handle PYTHONPATH for github actions. --- .github/workflows/python_unittests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_unittests.yaml b/.github/workflows/python_unittests.yaml index 0f213ff6bb..93456c4412 100644 --- a/.github/workflows/python_unittests.yaml +++ b/.github/workflows/python_unittests.yaml @@ -39,6 +39,6 @@ jobs: run: | ./manage_externals/checkout_externals ufs-weather-model # exclude test_retrieve_data that is tested in functional test - export PYTHONPATH=${PWD}/ush + export PYTHONPATH=${PWD}/ush:${PWD}/ush/python_utils/uwtools:${PWD}/ush/python_utils/uwtools/src python3 -m unittest -b tests/test_python/*.py From ee339358ab437951403badd0a246b99c9ad86e89 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 12 May 2023 21:42:14 +0000 Subject: [PATCH 12/38] Make unittests work on GHA? 
--- .github/workflows/python_unittests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_unittests.yaml b/.github/workflows/python_unittests.yaml index 93456c4412..f049b0ef5b 100644 --- a/.github/workflows/python_unittests.yaml +++ b/.github/workflows/python_unittests.yaml @@ -39,6 +39,6 @@ jobs: run: | ./manage_externals/checkout_externals ufs-weather-model # exclude test_retrieve_data that is tested in functional test - export PYTHONPATH=${PWD}/ush:${PWD}/ush/python_utils/uwtools:${PWD}/ush/python_utils/uwtools/src + export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/uwtools:$(pwd)/ush/python_utils/uwtools/src python3 -m unittest -b tests/test_python/*.py From 6bb4dc6a2c81aeabf44218102dcdc316620d7cd5 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 12 May 2023 21:56:28 +0000 Subject: [PATCH 13/38] Check out uwtools as an external for these tests. --- .github/workflows/python_unittests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_unittests.yaml b/.github/workflows/python_unittests.yaml index f049b0ef5b..9fdc317d4f 100644 --- a/.github/workflows/python_unittests.yaml +++ b/.github/workflows/python_unittests.yaml @@ -37,7 +37,7 @@ jobs: # Run python unittests - name: Run python unittests run: | - ./manage_externals/checkout_externals ufs-weather-model + ./manage_externals/checkout_externals ufs-weather-model uwtools # exclude test_retrieve_data that is tested in functional test export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/uwtools:$(pwd)/ush/python_utils/uwtools/src python3 -m unittest -b tests/test_python/*.py From 91aac5252c2d1c29867e2282b76495baa4005d07 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 12 May 2023 22:03:18 +0000 Subject: [PATCH 14/38] Updating the hash of workflow-tools. --- Externals.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index e7934d3d1d..6f952f5f5a 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -66,8 +66,8 @@ required = True protocol = git repo_url = https://github.com/ufs-community/workflow-tools # Specify either a branch name or a hash but not both. -branch = srw_integration_changes -#hash = 04da24c +# branch = develop +hash = e1b3b6f local_path = ush/python_utils/uwtools required = True From 5ac70d136c1bb51f828207bb555efe7e8c23e796 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 15 May 2023 14:04:52 +0000 Subject: [PATCH 15/38] Checkout uwtools externals to pass linter. --- .github/workflows/python_linter.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/python_linter.yaml b/.github/workflows/python_linter.yaml index 336e1b9bbc..4193651483 100644 --- a/.github/workflows/python_linter.yaml +++ b/.github/workflows/python_linter.yaml @@ -33,6 +33,7 @@ jobs: # Run python unittests - name: Lint the test directory run: | + ./manage_externals/checkout_externals uwtools export PYTHONPATH=${PWD}/ush pylint --ignore-imports=yes tests/test_python/ pylint ush/create_*.py From d5312af6ca3d52fb06418f81edd880b41a2953ff Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 15 May 2023 14:08:42 +0000 Subject: [PATCH 16/38] Same PYTHONPATH as unittests. 
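The two extra path entries give the linter the same import layout the unit tests already rely on: the uwtools checkout itself (so the templater script under scripts/ resolves) and its src/ subdirectory (so the package modules resolve). Below is a minimal, illustrative sketch of that layout; the checkout location comes from Externals.cfg earlier in this series, and the final import mirrors the one used in generate_FV3LAM_wflow.py rather than anything verified against the workflow-tools package itself.

    # Sketch only: mirrors the PYTHONPATH export used in the CI workflows,
    # assuming ./manage_externals/checkout_externals uwtools has populated
    # ush/python_utils/uwtools (per Externals.cfg).
    import os
    import sys

    repo = os.getcwd()  # repository root, as in the GitHub Actions run
    for sub in ("ush", "ush/python_utils/uwtools", "ush/python_utils/uwtools/src"):
        sys.path.insert(0, os.path.join(repo, sub))

    # With those entries in place, the import used elsewhere in this series
    # (e.g., in generate_FV3LAM_wflow.py) should resolve:
    from scripts.templater import set_template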
--- .github/workflows/python_linter.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_linter.yaml b/.github/workflows/python_linter.yaml index 4193651483..67a862abf6 100644 --- a/.github/workflows/python_linter.yaml +++ b/.github/workflows/python_linter.yaml @@ -34,7 +34,7 @@ jobs: - name: Lint the test directory run: | ./manage_externals/checkout_externals uwtools - export PYTHONPATH=${PWD}/ush + export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/uwtools:$(pwd)/ush/python_utils/uwtools/src pylint --ignore-imports=yes tests/test_python/ pylint ush/create_*.py pylint ush/generate_FV3LAM_wflow.py From 72f57fae857337f571e6f92f166a94e245ee3119 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 15 May 2023 15:44:26 +0000 Subject: [PATCH 17/38] Replate fill_jinja_template in new scripts. --- ...onal_run_met_genensprod_or_ensemblestat.sh | 23 +++++++++++------- ...gional_run_met_gridstat_or_pointstat_vx.sh | 23 +++++++++++------- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 24 ++++++++++++------- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 17 ++++++------- scripts/exregional_run_met_pb2nc_obs.sh | 2 +- scripts/exregional_run_met_pcpcombine.sh | 3 +-- 6 files changed, 57 insertions(+), 35 deletions(-) diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 9f5ddef0b7..01d245a8f1 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -381,25 +381,32 @@ settings="\ 'accum_no_pad': '${ACCUM_NO_PAD:-}' 'field_thresholds': '${FIELD_THRESHOLDS:-}' " + +# Store the settings in a temporary file +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") +cat > $tmpfile << EOF +$settings +EOF # # Call the python script to generate the METplus configuration file from # the jinja template. # -$USHdir/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${metplus_config_tmpl_fp} \ - -o ${metplus_config_fp} || \ +python3 $USHdir/python_utils/uwtools/scripts/templater.py \ + -c "${tmpfile}" \ + -i ${metplus_config_tmpl_fp} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to python script fill_jinja_template.py to generate a METplus +Call to uwtools templater to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: metplus_config_tmpl_fp = \"${metplus_config_tmpl_fp}\" Full path to output METplus configuration file: metplus_config_fp = \"${metplus_config_fp}\" - Jinja settings specified on command line: - settings = -$settings" + Full path to configuration file: + ${tmpfile} +" +rm $tmpfile # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 62e5961f4e..feb3de6c27 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -361,25 +361,32 @@ settings="\ 'accum_no_pad': '${ACCUM_NO_PAD:-}' 'field_thresholds': '${FIELD_THRESHOLDS:-}' " + +# Store the settings in a temporary file +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") +cat > $tmpfile << EOF +$settings +EOF # # Call the python script to generate the METplus configuration file from # the jinja template. 
# -$USHdir/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${metplus_config_tmpl_fp} \ - -o ${metplus_config_fp} || \ +python3 $USHdir/python_utils/uwtools/scripts/templater.py \ + -c "${tmpfile}" \ + -i ${metplus_config_tmpl_fp} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to python script fill_jinja_template.py to generate a METplus +Call to uwtools templater to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: metplus_config_tmpl_fp = \"${metplus_config_tmpl_fp}\" Full path to output METplus configuration file: metplus_config_fp = \"${metplus_config_fp}\" - Jinja settings specified on command line: - settings = -$settings" + Full path to configuration file: + ${tmpfile} +" +rm $tmpfile # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 15c2e34a56..e6ae8e3db4 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -347,25 +347,33 @@ settings="\ 'accum_no_pad': '${ACCUM_NO_PAD:-}' 'field_thresholds': '${FIELD_THRESHOLDS:-}' " + +# Store the settings in a temporary file +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") +cat > $tmpfile << EOF +$settings +EOF # # Call the python script to generate the METplus configuration file from # the jinja template. # -$USHdir/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${metplus_config_tmpl_fp} \ - -o ${metplus_config_fp} || \ + +python3 $USHdir/python_utils/uwtools/scripts/templater.py \ + -c "${tmpfile}" \ + -i ${metplus_config_tmpl_fp} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to python script fill_jinja_template.py to generate a METplus +Call to uwtools templater to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: metplus_config_tmpl_fp = \"${metplus_config_tmpl_fp}\" Full path to output METplus configuration file: metplus_config_fp = \"${metplus_config_fp}\" - Jinja settings specified on command line: - settings = -$settings" + Full path to configuration file: + ${tmpfile} +" +rm $tmpfile # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index fb33760892..d2b61cdbc3 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -313,21 +313,22 @@ settings="\ # Call the python script to generate the METplus configuration file from # the jinja template. # -$USHdir/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${metplus_config_tmpl_fp} \ - -o ${metplus_config_fp} || \ +python3 $USHdir/python_utils/uwtools/scripts/templater.py \ + -c "${tmpfile}" \ + -i ${metplus_config_tmpl_fp} \ + -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to python script fill_jinja_template.py to generate a METplus +Call to uwtools templater to generate a METplus configuration file from a jinja template failed. 
Parameters passed to this script are: Full path to template METplus configuration file: metplus_config_tmpl_fp = \"${metplus_config_tmpl_fp}\" Full path to output METplus configuration file: metplus_config_fp = \"${metplus_config_fp}\" - Jinja settings specified on command line: - settings = -$settings" + Full path to configuration file: + ${tmpfile} +" +rm $tmpfile # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index ef63f3d63a..a5a23181c6 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -288,8 +288,8 @@ EOF # the jinja template. # python3 $USHdir/python_utils/uwtools/scripts/templater.py \ - -i ${metplus_config_tmpl_fp} \ -c ${tmpfile} \ + -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ Call to uwtools templater.py to generate a METplus diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index e0b11b2e9d..c3b35068e8 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -342,9 +342,8 @@ EOF # the jinja template. # python3 $USHdir/python_utils/uwtools/scripts/templater.py \ - -q \ - -i ${metplus_config_tmpl_fp} \ -c ${tmpfile} \ + -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ Call to uwtools templater.py to generate a METplus From 4484a19695cac6f374c2e8480c52d5b531cf43a9 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 15 May 2023 16:16:09 +0000 Subject: [PATCH 18/38] Add missed tmpfile creation block. --- .../exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index d2b61cdbc3..f208919783 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -309,6 +309,11 @@ settings="\ 'accum_no_pad': '${ACCUM_NO_PAD:-}' 'field_thresholds': '${FIELD_THRESHOLDS:-}' " +# Store the settings in a temporary file +tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") +cat > $tmpfile << EOF +$settings +EOF # # Call the python script to generate the METplus configuration file from # the jinja template. From 7dd1c7a50c536649cf8230a7f485ae389ad853ff Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 17 May 2023 14:22:31 +0000 Subject: [PATCH 19/38] Linting. 
--- ush/generate_FV3LAM_wflow.py | 191 ++++++++++------------------------- 1 file changed, 51 insertions(+), 140 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 7e77bd2065..9516a99604 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -480,7 +480,7 @@ def generate_FV3LAM_wflow( settings_str = cfg_to_yaml_str(settings) log_info( - f""" + """ The variable 'settings' specifying values of the weather model's namelist variables has been set as follows:\n""", verbose=verbose, @@ -499,40 +499,19 @@ def generate_FV3LAM_wflow( # # ----------------------------------------------------------------------- # - try: - set_namelist( - [ - "-q", - "-n", - FV3_NML_BASE_SUITE_FP, - "-c", - FV3_NML_YAML_CONFIG_FP, - CCPP_PHYS_SUITE, - "-u", - settings_str, - "-o", - FV3_NML_FP, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_BASE_SUITE_FP = '{FV3_NML_BASE_SUITE_FP}' - Full path to yaml configuration file for various physics suites: - FV3_NML_YAML_CONFIG_FP = '{FV3_NML_YAML_CONFIG_FP}' - Physics suite to extract from yaml configuration file: - CCPP_PHYS_SUITE = '{CCPP_PHYS_SUITE}' - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) + set_namelist( + [ + "-n", + FV3_NML_BASE_SUITE_FP, + "-c", + FV3_NML_YAML_CONFIG_FP, + CCPP_PHYS_SUITE, + "-u", + settings_str, + "-o", + FV3_NML_FP, + ] + ) # # If not running the TN_MAKE_GRID task (which implies the workflow will # use pregenerated grid files), set the namelist variables specifying @@ -569,34 +548,16 @@ def generate_FV3LAM_wflow( # # populate the namelist file # - try: - set_namelist( - [ - "-q", - "-n", - FV3_NML_FP, - "-u", - settings_str, - "-o", - FV3_NML_CYCSFC_FP, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. Parameters passed to this script are: - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file for DA: - FV3_NML_RESTART_FP = '{FV3_NML_CYCSFC_FP}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - + set_namelist( + [ + "-n", + FV3_NML_FP, + "-u", + settings_str, + "-o", + FV3_NML_CYCSFC_FP, + ] + ) # # ----------------------------------------------------------------------- # @@ -612,7 +573,7 @@ def generate_FV3LAM_wflow( lupdatebc = False if DO_UPDATE_BC: lupdatebc = False # not ready for setting this to true yet - + settings = {} settings["fv_core_nml"] = { "external_ic": False, @@ -632,33 +593,17 @@ def generate_FV3LAM_wflow( # # populate the namelist file # - try: - set_namelist( - [ - "-q", - "-n", - FV3_NML_FP, - "-u", - settings_str, - "-o", - FV3_NML_RESTART_FP, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. 
Parameters passed to this script are: - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file for DA: - FV3_NML_RESTART_FP = '{FV3_NML_RESTART_FP}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) + set_namelist( + [ + "-q", + "-n", + FV3_NML_FP, + "-u", + settings_str, + "-o", + FV3_NML_RESTART_FP, + ] + ) # # ----------------------------------------------------------------------- # @@ -770,65 +715,31 @@ def generate_FV3LAM_wflow( # #----------------------------------------------------------------------- # - if DO_ENSEMBLE and ( DO_SPP or DO_SPPT or DO_SHUM or DO_SKEB or DO_LSM_SPP): + if DO_ENSEMBLE and any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): - try: + set_namelist( + [ + "-n", + FV3_NML_FP, + "-u", + settings_str, + "-o", + FV3_NML_STOCH_FP, + ] + ) + + if DO_DACYCLE or DO_ENKFUPDATE: set_namelist( [ "-q", "-n", - FV3_NML_FP, + FV3_NML_RESTART_FP, "-u", settings_str, "-o", - FV3_NML_STOCH_FP, + FV3_NML_RESTART_STOCH_FP, ] ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. Parameters passed to this script are: - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file for stochastics: - FV3_NML_STOCH_FP = '{FV3_NML_STOCH_FP}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - - if DO_DACYCLE or DO_ENKFUPDATE: - try: - set_namelist( - [ - "-q", - "-n", - FV3_NML_RESTART_FP, - "-u", - settings_str, - "-o", - FV3_NML_RESTART_STOCH_FP, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. Parameters passed to this script are: - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file for stochastics: - FV3_NML_RESTART_STOCH_FP = '{FV3_NML_RESTART_STOCH_FP}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - # # ----------------------------------------------------------------------- # From cf3871c44dc2f46bb30876764a988767f124babf Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 18 May 2023 15:12:45 +0000 Subject: [PATCH 20/38] Ignore the new external. --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index bc3eee8545..af17ca4b65 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ tests/WE2E/log.* ush/__pycache__/ ush/config.yaml ush/python_utils/__pycache__/ +ush/python_utils/uwtools/ ush/*.swp *.swp From cdb098d52780153f9234742ac80707a2bb176f97 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 19 May 2023 16:01:48 +0000 Subject: [PATCH 21/38] Attempting to fix a getcwd issue with tests. 
--- tests/test_python/test_python_utils.py | 3 +-- .../test_set_FV3nml_ens_stoch_seeds.py | 17 ++++++++++------- ush/set_FV3nml_sfc_climo_filenames.py | 2 +- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/tests/test_python/test_python_utils.py b/tests/test_python/test_python_utils.py index ea7c903b16..4104d7db93 100644 --- a/tests/test_python/test_python_utils.py +++ b/tests/test_python/test_python_utils.py @@ -168,8 +168,7 @@ def test_import_vars(self): # python util.import_vars(env_vars=env_vars) - # assuming all environments arlready have $PWD set - #pylint: disable= + # assuming all environments already have $PWD set self.assertEqual( os.path.realpath(PWD), #pylint: disable=undefined-variable os.path.realpath(os.getcwd()) diff --git a/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py b/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py index 15c7d79223..9a452bc1c2 100644 --- a/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py +++ b/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py @@ -21,6 +21,7 @@ class Testing(unittest.TestCase): """ Define the tests """ def test_set_FV3nml_ens_stoch_seeds(self): """ Call the function and make sure it doesn't fail""" + os.chdir(self.mem_dir) set_FV3nml_ens_stoch_seeds(cdate=self.cdate) def setUp(self): @@ -40,18 +41,20 @@ def setUp(self): ) EXPTDIR = self.tmp_dir.name - cp_vrfy( - os.path.join(PARMdir, "input.nml.FV3"), - os.path.join(EXPTDIR, "input.nml_base"), - ) - # Put this in the tmp_dir structure so it gets cleaned up - mem_dir = os.path.join( + self.mem_dir = os.path.join( EXPTDIR, f"{date_to_str(self.cdate,format='%Y%m%d%H')}", "mem2", ) - mkdir_vrfy("-p", mem_dir) + + mkdir_vrfy("-p", self.mem_dir) + cp_vrfy( + os.path.join(PARMdir, "input.nml.FV3"), + os.path.join(EXPTDIR, "input.nml_base"), + ) + + set_env_var("USHdir", USHdir) set_env_var("ENSMEM_INDX", 2) set_env_var("FV3_NML_FN", "input.nml") diff --git a/ush/set_FV3nml_sfc_climo_filenames.py b/ush/set_FV3nml_sfc_climo_filenames.py index cdc4ed1628..bc579151c5 100644 --- a/ush/set_FV3nml_sfc_climo_filenames.py +++ b/ush/set_FV3nml_sfc_climo_filenames.py @@ -60,7 +60,7 @@ def set_FV3nml_sfc_climo_filenames(): # Set the suffix of the surface climatology files. suffix = "tileX.nc" - # create yaml-complaint string + # create yaml-compliant string settings = {} dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") From 787b2f7c39313c5402a00b38c8ef2080b2aee5d1 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 19 May 2023 16:28:12 +0000 Subject: [PATCH 22/38] Set PYTHONPATH for all platforms. 
--- modulefiles/set_pythonpath.lua | 13 +++++++++++++ modulefiles/wflow_cheyenne.lua | 2 ++ modulefiles/wflow_gaea.lua | 1 + modulefiles/wflow_hera.lua | 9 +-------- modulefiles/wflow_jet.lua | 1 + modulefiles/wflow_linux.lua | 3 +++ modulefiles/wflow_macos.lua | 3 +++ modulefiles/wflow_noaacloud.lua | 2 ++ modulefiles/wflow_odin.lua | 2 ++ modulefiles/wflow_orion.lua | 1 + modulefiles/wflow_singularity.lua | 1 + modulefiles/wflow_wcoss2.lua | 1 + 12 files changed, 31 insertions(+), 8 deletions(-) create mode 100644 modulefiles/set_pythonpath.lua diff --git a/modulefiles/set_pythonpath.lua b/modulefiles/set_pythonpath.lua new file mode 100644 index 0000000000..c6db556c43 --- /dev/null +++ b/modulefiles/set_pythonpath.lua @@ -0,0 +1,13 @@ +help([[ +This module sets the PYTHONPATH in the user environment to allow the +workflow tools to be imported +]]) + +whatis([===[Sets paths for using workflow-tools with SRW]===]) + +local mod_path, mod_file = splitFileName(myFileName()) +local uwtools_scripts_path = pathJoin(mod_path, "/../ush/python_utils/uwtools") +local uwtools_package_path = pathJoin(mod_path, "/../ush/python_utils/uwtools/src/") + +prepend_path("PYTHONPATH", uwtools_scripts_path) +prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/wflow_cheyenne.lua b/modulefiles/wflow_cheyenne.lua index 35b7365d43..094bf0918a 100644 --- a/modulefiles/wflow_cheyenne.lua +++ b/modulefiles/wflow_cheyenne.lua @@ -11,6 +11,8 @@ append_path("MODULEPATH","/glade/p/ral/jntp/UFS_SRW_app/modules") load("rocoto") unload("python") + +load("set_pythonpath") prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) diff --git a/modulefiles/wflow_gaea.lua b/modulefiles/wflow_gaea.lua index e4b726aac1..96efa3c31c 100644 --- a/modulefiles/wflow_gaea.lua +++ b/modulefiles/wflow_gaea.lua @@ -5,6 +5,7 @@ the NOAA RDHPC machine Gaea whatis([===[Loads libraries needed for running the UFS SRW App on gaea ]===]) +load("set_pythonpath") prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) load("rocoto") diff --git a/modulefiles/wflow_hera.lua b/modulefiles/wflow_hera.lua index 56b872870e..3e981921b2 100644 --- a/modulefiles/wflow_hera.lua +++ b/modulefiles/wflow_hera.lua @@ -6,14 +6,7 @@ the NOAA RDHPC machine Hera whatis([===[Loads libraries needed for running the UFS SRW App on Hera ]===]) load("rocoto") - - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "/../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "/../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) +load("set_pythonpath") prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) diff --git a/modulefiles/wflow_jet.lua b/modulefiles/wflow_jet.lua index be11c223bc..3ffcd68414 100644 --- a/modulefiles/wflow_jet.lua +++ b/modulefiles/wflow_jet.lua @@ -6,6 +6,7 @@ the NOAA RDHPC machine Jet whatis([===[Loads libraries needed for running the UFS SRW App on Jet ]===]) load("rocoto") +load("set_pythonpath") prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) diff --git a/modulefiles/wflow_linux.lua 
b/modulefiles/wflow_linux.lua index 6c4cc6949d..74f7c8edb6 100644 --- a/modulefiles/wflow_linux.lua +++ b/modulefiles/wflow_linux.lua @@ -31,6 +31,9 @@ prepend_path("PATH", pathJoin(rocoto_path,"bin")) local srw_path="/home/username/ufs-srweather-app" prepend_path("PATH", pathJoin(srw_path, "ush/rocoto_fake_slurm")) +-- set python path +load("set_pythonpath") + -- display conda activation message if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: diff --git a/modulefiles/wflow_macos.lua b/modulefiles/wflow_macos.lua index d7cf30e0a3..ddd3b60ae6 100644 --- a/modulefiles/wflow_macos.lua +++ b/modulefiles/wflow_macos.lua @@ -31,6 +31,9 @@ prepend_path("PATH", pathJoin(rocoto_path,"bin")) local srw_path="/Users/username/ufs-srweather-app" prepend_path("PATH", pathJoin(srw_path, "ush/rocoto_fake_slurm")) +-- set python path +load("set_pythonpath") + -- display conda activation message if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda virtual environment: diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua index 80501f5075..6c8925444d 100644 --- a/modulefiles/wflow_noaacloud.lua +++ b/modulefiles/wflow_noaacloud.lua @@ -7,6 +7,8 @@ whatis([===[Loads libraries needed for running the UFS SRW App on NOAA cloud ]== prepend_path("MODULEPATH","/apps/modules/modulefiles") load("rocoto") +load("set_pythonpath") + prepend_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) diff --git a/modulefiles/wflow_odin.lua b/modulefiles/wflow_odin.lua index 7b1b5d8203..886d072d06 100644 --- a/modulefiles/wflow_odin.lua +++ b/modulefiles/wflow_odin.lua @@ -5,6 +5,8 @@ the NSSL machine Odin whatis([===[Loads libraries needed for running the UFS SRW App on Odin ]===]) +load("set_pythonpath") + if mode() == "load" then -- >>> conda initialize >>> -- !! Contents within this block are managed by 'conda init' !! diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua index 78997de1bb..b56ae2a69b 100644 --- a/modulefiles/wflow_orion.lua +++ b/modulefiles/wflow_orion.lua @@ -8,6 +8,7 @@ whatis([===[Loads libraries needed for running SRW on Orion ]===]) load("contrib") load("rocoto") load("wget") +load("set_pythonpath") unload("python") append_path("MODULEPATH","/work/noaa/epic-ps/role-epic-ps/miniconda3/modulefiles") diff --git a/modulefiles/wflow_singularity.lua b/modulefiles/wflow_singularity.lua index 3c16a93570..7d097afa28 100644 --- a/modulefiles/wflow_singularity.lua +++ b/modulefiles/wflow_singularity.lua @@ -4,6 +4,7 @@ a singularity container ]]) whatis([===[Loads libraries needed for running the UFS SRW App in a singularity container]===]) +load("set_pythonpath") append_path("MODULEPATH","/opt/hpc-modules/modulefiles/core") load("miniconda3") diff --git a/modulefiles/wflow_wcoss2.lua b/modulefiles/wflow_wcoss2.lua index a061b93323..4212f17493 100644 --- a/modulefiles/wflow_wcoss2.lua +++ b/modulefiles/wflow_wcoss2.lua @@ -7,6 +7,7 @@ whatis([===[Loads libraries needed for running the UFS SRW App on WCOSS2 ]===]) load(pathJoin("intel", os.getenv("intel_ver"))) load(pathJoin("python", os.getenv("python_ver"))) +load("set_pythonpath") prepend_path("MODULEPATH","/apps/ops/test/nco/modulefiles") load(pathJoin("core/rocoto", os.getenv("rocoto_ver"))) From 89c6a716121af39ba1dd8f11e75b1d9de4ea3960 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 19 May 2023 19:44:47 +0000 Subject: [PATCH 23/38] Fix bad text edits. 
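What set_pythonpath.lua accomplishes can be mimicked from Python itself. The sketch below is a rough illustration only, assuming the external is checked out under ush/python_utils/uwtools (renamed to workflow-tools later in this series) and using a made-up repository root:

    import os
    import sys

    # Hypothetical repo root; set_pythonpath.lua derives this from its own module path.
    srw_root = "/path/to/ufs-srweather-app"
    for rel in ("ush/python_utils/uwtools", "ush/python_utils/uwtools/src"):
        sys.path.insert(0, os.path.join(srw_root, rel))

    # With both entries on the path, the templater import used by the ush scripts resolves.
    from scripts.templater import set_template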
--- ush/generate_FV3LAM_wflow.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index a3932c76a6..184b0ea21b 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -282,7 +282,7 @@ def generate_FV3LAM_wflow( ) # # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. - # These need to be set in the FV3-LAM Fortran namelist file. They represen + # These need to be set in the FV3-LAM Fortran namelist file. They represent # the number of cell vertices in the x and y directions on the regional # grid. # @@ -290,7 +290,7 @@ def generate_FV3LAM_wflow( npy = NY + 1 # # For the physics suites that use RUC LSM, set the parameter kice to 9, - # Otherwise, leave it unspecified (which means it gets set to the defaul + # Otherwise, leave it unspecified (which means it gets set to the default # value in the forecast model). # kice = None @@ -302,7 +302,7 @@ def generate_FV3LAM_wflow( # nsoill_out in the namelist file for chgres_cube. [On the other hand, # the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or # FV3.input.yml) is the number of soil levels that the LSM scheme in the - # forecast model will run with.] Here, we use the same approach to se + # forecast model will run with.] Here, we use the same approach to set # lsoil as the one used to set nsoill_out in exregional_make_ics.sh. # See that script for details. # From 3599c64179fc9aa70eabaa85781c6a0ba0a1db7e Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 15:12:26 +0000 Subject: [PATCH 24/38] Change external from uwtools to workflow-tools --- .github/workflows/python_linter.yaml | 4 ++-- .github/workflows/python_unittests.yaml | 4 ++-- Externals.cfg | 4 ++-- scripts/exregional_run_met_genensprod_or_ensemblestat.sh | 4 ++-- scripts/exregional_run_met_gridstat_or_pointstat_vx.sh | 4 ++-- .../exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh | 4 ++-- .../exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh | 4 ++-- scripts/exregional_run_met_pb2nc_obs.sh | 4 ++-- scripts/exregional_run_met_pcpcombine.sh | 4 ++-- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/python_linter.yaml b/.github/workflows/python_linter.yaml index 67a862abf6..b63a2a513c 100644 --- a/.github/workflows/python_linter.yaml +++ b/.github/workflows/python_linter.yaml @@ -33,8 +33,8 @@ jobs: # Run python unittests - name: Lint the test directory run: | - ./manage_externals/checkout_externals uwtools - export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/uwtools:$(pwd)/ush/python_utils/uwtools/src + ./manage_externals/checkout_externals workflow-tools + export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src pylint --ignore-imports=yes tests/test_python/ pylint ush/create_*.py pylint ush/generate_FV3LAM_wflow.py diff --git a/.github/workflows/python_unittests.yaml b/.github/workflows/python_unittests.yaml index 9fdc317d4f..5e491dea6e 100644 --- a/.github/workflows/python_unittests.yaml +++ b/.github/workflows/python_unittests.yaml @@ -37,8 +37,8 @@ jobs: # Run python unittests - name: Run python unittests run: | - ./manage_externals/checkout_externals ufs-weather-model uwtools + ./manage_externals/checkout_externals ufs-weather-model workflow-tools # exclude test_retrieve_data that is tested in functional test - export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/uwtools:$(pwd)/ush/python_utils/uwtools/src + export 
PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src python3 -m unittest -b tests/test_python/*.py diff --git a/Externals.cfg b/Externals.cfg index 895f88fc8c..988416144e 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -62,13 +62,13 @@ hash = 0a86f73 local_path = sorc/AQM-utils required = True -[uwtools] +[workflow-tools] protocol = git repo_url = https://github.com/ufs-community/workflow-tools # Specify either a branch name or a hash but not both. # branch = develop hash = e1b3b6f -local_path = ush/python_utils/uwtools +local_path = ush/python_utils/workflow-tools required = True [externals_description] diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 01d245a8f1..499a2e7ae3 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -391,12 +391,12 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -python3 $USHdir/python_utils/uwtools/scripts/templater.py \ +python3 $USHdir/python_utils/workflow-tools/scripts/templater.py \ -c "${tmpfile}" \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to uwtools templater to generate a METplus +Call to workflow-tools templater to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index feb3de6c27..ac96433363 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -371,12 +371,12 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -python3 $USHdir/python_utils/uwtools/scripts/templater.py \ +python3 $USHdir/python_utils/workflow-tools/scripts/templater.py \ -c "${tmpfile}" \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to uwtools templater to generate a METplus +Call to workflow-tools templater to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index e6ae8e3db4..6448a13fbf 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -358,12 +358,12 @@ EOF # the jinja template. # -python3 $USHdir/python_utils/uwtools/scripts/templater.py \ +python3 $USHdir/python_utils/workflow-tools/scripts/templater.py \ -c "${tmpfile}" \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to uwtools templater to generate a METplus +Call to workflow-tools templater to generate a METplus configuration file from a jinja template failed. 
Parameters passed to this script are: Full path to template METplus configuration file: diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index f208919783..5df39c2d9d 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -318,12 +318,12 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -python3 $USHdir/python_utils/uwtools/scripts/templater.py \ +python3 $USHdir/python_utils/workflow-tools/scripts/templater.py \ -c "${tmpfile}" \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to uwtools templater to generate a METplus +Call to workflow-tools templater to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index a5a23181c6..b4505d752a 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -287,12 +287,12 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -python3 $USHdir/python_utils/uwtools/scripts/templater.py \ +python3 $USHdir/python_utils/workflow-tools/scripts/templater.py \ -c ${tmpfile} \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to uwtools templater.py to generate a METplus +Call to workflow-tools templater.py to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index c3b35068e8..1e89b20ade 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -341,12 +341,12 @@ EOF # Call the python script to generate the METplus configuration file from # the jinja template. # -python3 $USHdir/python_utils/uwtools/scripts/templater.py \ +python3 $USHdir/python_utils/workflow-tools/scripts/templater.py \ -c ${tmpfile} \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} || \ print_err_msg_exit "\ -Call to uwtools templater.py to generate a METplus +Call to workflow-tools templater.py to generate a METplus configuration file from a jinja template failed. Parameters passed to this script are: Full path to template METplus configuration file: From f627f088b48c67eff5596188467c2dd7b2c056c4 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 15:16:55 +0000 Subject: [PATCH 25/38] Add comments on where the import comes from. 
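Conceptually, each templater.py call above renders a Jinja template with values read from a YAML settings file. The standalone sketch below shows that idea using jinja2 and PyYAML directly; it is not the uwtools implementation, and the file names simply mirror the -c/-i/-o arguments with hypothetical values.

    import yaml
    from jinja2 import Template

    # Hypothetical settings file, template, and output, mirroring -c, -i, and -o.
    with open("met_plus_settings.yaml", encoding="utf-8") as f:
        values = yaml.safe_load(f)

    with open("PointStat.conf.j2", encoding="utf-8") as f:
        template = Template(f.read())

    with open("PointStat.conf", "w", encoding="utf-8") as f:
        f.write(template.render(**values))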
--- ush/create_aqm_rc_file.py | 1 + ush/create_diag_table_file.py | 1 + ush/create_model_configure_file.py | 1 + ush/create_nems_configure_file.py | 1 + ush/generate_FV3LAM_wflow.py | 2 ++ 5 files changed, 6 insertions(+) diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 50db26c0b1..e580a965b5 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -19,6 +19,7 @@ flatten_dict ) +# These come from ush/python_utils/workflow-tools from scripts.templater import set_template def create_aqm_rc_file(cdate, run_dir, init_concentrations): diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 8df137262e..3ef9ec3901 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -20,6 +20,7 @@ flatten_dict, ) +# These come from ush/python_utils/workflow-tools from scripts.templater import set_template diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index f8df5d7274..e39fff695e 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -20,6 +20,7 @@ flatten_dict, ) +# These come from ush/python_utils/workflow-tools from scripts.templater import set_template diff --git a/ush/create_nems_configure_file.py b/ush/create_nems_configure_file.py index fc447a9bb2..7a2ef0723f 100644 --- a/ush/create_nems_configure_file.py +++ b/ush/create_nems_configure_file.py @@ -20,6 +20,7 @@ flatten_dict, ) +# These come from ush/python_utils/workflow-tools from scripts.templater import set_template def create_nems_configure_file(run_dir): diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 184b0ea21b..0ebedab431 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -31,6 +31,8 @@ from get_crontab_contents import add_crontab_line from set_namelist import set_namelist from check_python_version import check_python_version + +# These come from ush/python_utils/workflow-tools from scripts.templater import set_template # pylint: disable=too-many-locals,too-many-branches, too-many-statements From 6d033e446ad6eabe0260cdbb2c4c09f4293c18a5 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 15:20:38 +0000 Subject: [PATCH 26/38] Change the external name in .gitignore. --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index af17ca4b65..a727b940c0 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,7 @@ tests/WE2E/log.* ush/__pycache__/ ush/config.yaml ush/python_utils/__pycache__/ -ush/python_utils/uwtools/ +ush/python_utils/workflow-tools/ ush/*.swp *.swp From 11eb439e94d4d1f9df5520ed098452d13f60ce5f Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 15:39:41 +0000 Subject: [PATCH 27/38] PYTHONPATH is platform- and task-independent. Just use the one modulefile for all. 
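Once scripts.templater is importable, the ush scripts call set_template with an argv-style list rather than shelling out. A minimal sketch of that pattern follows, with hypothetical settings, template, and output paths chosen only for illustration:

    # These come from ush/python_utils/workflow-tools
    from scripts.templater import set_template

    # Hypothetical file names for illustration only.
    set_template(
        [
            "-q",
            "-c", "diag_table_settings.yaml",
            "-i", "diag_table_template",
            "-o", "diag_table",
        ]
    )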
--- modulefiles/set_pythonpath.lua | 4 ++-- modulefiles/tasks/cheyenne/python_srw.lua | 7 ------- modulefiles/tasks/gaea/python_srw.lua | 7 ------- modulefiles/tasks/hera/python_srw.lua | 7 ------- modulefiles/tasks/jet/python_srw.lua | 7 ------- modulefiles/tasks/noaacloud/python_srw.lua | 7 ------- modulefiles/tasks/orion/python_srw.lua | 7 ------- modulefiles/tasks/wcoss2/python_srw.lua | 7 ------- ush/load_modules_run_task.sh | 10 +++++++--- 9 files changed, 9 insertions(+), 54 deletions(-) diff --git a/modulefiles/set_pythonpath.lua b/modulefiles/set_pythonpath.lua index c6db556c43..e816ed6cfc 100644 --- a/modulefiles/set_pythonpath.lua +++ b/modulefiles/set_pythonpath.lua @@ -6,8 +6,8 @@ workflow tools to be imported whatis([===[Sets paths for using workflow-tools with SRW]===]) local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "/../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "/../ush/python_utils/uwtools/src/") +local uwtools_scripts_path = pathJoin(mod_path, "/../ush/python_utils/workflow-tools") +local uwtools_package_path = pathJoin(mod_path, "/../ush/python_utils/workflow-tools/src/") prepend_path("PYTHONPATH", uwtools_scripts_path) prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/cheyenne/python_srw.lua b/modulefiles/tasks/cheyenne/python_srw.lua index 3f7548609d..2263141a0a 100644 --- a/modulefiles/tasks/cheyenne/python_srw.lua +++ b/modulefiles/tasks/cheyenne/python_srw.lua @@ -3,10 +3,3 @@ prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles" load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua index d3cd294aed..74336ca1cf 100644 --- a/modulefiles/tasks/gaea/python_srw.lua +++ b/modulefiles/tasks/gaea/python_srw.lua @@ -2,10 +2,3 @@ prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/hera/python_srw.lua b/modulefiles/tasks/hera/python_srw.lua index fb412d10ce..7934169824 100644 --- a/modulefiles/tasks/hera/python_srw.lua +++ b/modulefiles/tasks/hera/python_srw.lua @@ -2,10 +2,3 @@ prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefil load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) 
-prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/jet/python_srw.lua b/modulefiles/tasks/jet/python_srw.lua index d10b86a6b8..ef4f248966 100644 --- a/modulefiles/tasks/jet/python_srw.lua +++ b/modulefiles/tasks/jet/python_srw.lua @@ -2,10 +2,3 @@ prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefil load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua index 7c972640c2..602d60842f 100644 --- a/modulefiles/tasks/noaacloud/python_srw.lua +++ b/modulefiles/tasks/noaacloud/python_srw.lua @@ -1,8 +1 @@ prepend_path("PATH", "/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin") - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua index 96a80e9bfc..5b7b0afc57 100644 --- a/modulefiles/tasks/orion/python_srw.lua +++ b/modulefiles/tasks/orion/python_srw.lua @@ -3,10 +3,3 @@ append_path("MODULEPATH","/work/noaa/epic-ps/role-epic-ps/miniconda3/modulefiles load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) setenv("SRW_ENV", "regional_workflow") - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/modulefiles/tasks/wcoss2/python_srw.lua b/modulefiles/tasks/wcoss2/python_srw.lua index 55b02d0a6c..519f1cdf4a 100644 --- a/modulefiles/tasks/wcoss2/python_srw.lua +++ b/modulefiles/tasks/wcoss2/python_srw.lua @@ -1,10 +1,3 @@ load(pathJoin("intel", os.getenv("intel_ver"))) load(pathJoin("python", os.getenv("python_ver"))) load(pathJoin("prod_util", os.getenv("prod_util_ver"))) - -local mod_path, mod_file = splitFileName(myFileName()) -local uwtools_scripts_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools") -local uwtools_package_path = pathJoin(mod_path, "../../../ush/python_utils/uwtools/src/") - -prepend_path("PYTHONPATH", uwtools_scripts_path) -prepend_path("PYTHONPATH", uwtools_package_path) diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 8d00a8fe65..086f24db98 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -98,15 +98,18 @@ set -u # #----------------------------------------------------------------------- # +default_modules_dir="$HOMEdir/modulefiles" machine=$(echo_lowercase $MACHINE) source "${HOMEdir}/etc/lmod-setup.sh" ${machine} if [ "${machine}" != "wcoss2" ]; then - module use "${HOMEdir}/modulefiles" + module use "${default_modules_dir}" module load "${BUILD_MOD_FN}" || print_err_msg_exit "\ 
Loading of platform- and compiler-specific module file (BUILD_MOD_FN) for the workflow task specified by task_name failed: task_name = \"${task_name}\" BUILD_MOD_FN = \"${BUILD_MOD_FN}\"" + module load set_pythonpath || print_err_msg_exit "\ + Loading the module to set PYTHONPATH for workflow-tools failed." fi # #----------------------------------------------------------------------- @@ -134,9 +137,8 @@ fi # #----------------------------------------------------------------------- # -modules_dir="$HOMEdir/modulefiles/tasks/$machine" +modules_dir="$default_modules_dir/tasks/$machine" modulefile_name="${task_name}" -default_modules_dir="$HOMEdir/modulefiles" # #----------------------------------------------------------------------- # @@ -173,6 +175,8 @@ elif [ -f ${modules_dir}/python_srw.lua ] ; then modules_dir = \"${modules_dir}\"" fi + + module list # Modules that use conda and need an environment activated will set the From a6b0232483a1a1afcf3c1b27838c393e0be0df20 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 15:45:41 +0000 Subject: [PATCH 28/38] Change all conda envs to workflow_tools --- modulefiles/tasks/cheyenne/python_srw.lua | 2 +- modulefiles/tasks/gaea/python_srw.lua | 2 +- modulefiles/tasks/hera/python_srw.lua | 2 +- modulefiles/tasks/jet/python_srw.lua | 2 +- modulefiles/tasks/orion/python_srw.lua | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/modulefiles/tasks/cheyenne/python_srw.lua b/modulefiles/tasks/cheyenne/python_srw.lua index 2263141a0a..57e2c2eed0 100644 --- a/modulefiles/tasks/cheyenne/python_srw.lua +++ b/modulefiles/tasks/cheyenne/python_srw.lua @@ -2,4 +2,4 @@ unload("python") prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) -setenv("SRW_ENV", "regional_workflow") +setenv("SRW_ENV", "workflow_tools") diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua index 74336ca1cf..c0f454fa71 100644 --- a/modulefiles/tasks/gaea/python_srw.lua +++ b/modulefiles/tasks/gaea/python_srw.lua @@ -1,4 +1,4 @@ prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) -setenv("SRW_ENV", "regional_workflow") +setenv("SRW_ENV", "workflow_tools") diff --git a/modulefiles/tasks/hera/python_srw.lua b/modulefiles/tasks/hera/python_srw.lua index 7934169824..62ddf7d9e8 100644 --- a/modulefiles/tasks/hera/python_srw.lua +++ b/modulefiles/tasks/hera/python_srw.lua @@ -1,4 +1,4 @@ prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) -setenv("SRW_ENV", "regional_workflow") +setenv("SRW_ENV", "workflow_tools") diff --git a/modulefiles/tasks/jet/python_srw.lua b/modulefiles/tasks/jet/python_srw.lua index ef4f248966..3c7987be18 100644 --- a/modulefiles/tasks/jet/python_srw.lua +++ b/modulefiles/tasks/jet/python_srw.lua @@ -1,4 +1,4 @@ prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) -setenv("SRW_ENV", "regional_workflow") +setenv("SRW_ENV", "workflow_tools") diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua index 5b7b0afc57..c7e25d3ad4 100644 --- a/modulefiles/tasks/orion/python_srw.lua +++ b/modulefiles/tasks/orion/python_srw.lua @@ -2,4 +2,4 @@ unload("python") 
append_path("MODULEPATH","/work/noaa/epic-ps/role-epic-ps/miniconda3/modulefiles") load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) -setenv("SRW_ENV", "regional_workflow") +setenv("SRW_ENV", "workflow_tools") From 13036c98d589b3e0f26bdde7399cf06d7851bcfa Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 16:31:45 +0000 Subject: [PATCH 29/38] Plotting still needs regional_workflow env. --- modulefiles/tasks/cheyenne/plot_allvars.local.lua | 5 +++++ modulefiles/tasks/gaea/plot_allvars.local.lua | 4 ++++ modulefiles/tasks/hera/plot_allvars.local.lua | 4 ++++ modulefiles/tasks/jet/plot_allvars.local.lua | 4 ++++ modulefiles/tasks/noaacloud/plot_allvars.local.lua | 1 + modulefiles/tasks/orion/plot_allvars.local.lua | 5 +++++ 6 files changed, 23 insertions(+) create mode 100644 modulefiles/tasks/cheyenne/plot_allvars.local.lua create mode 100644 modulefiles/tasks/gaea/plot_allvars.local.lua create mode 100644 modulefiles/tasks/hera/plot_allvars.local.lua create mode 100644 modulefiles/tasks/jet/plot_allvars.local.lua create mode 100644 modulefiles/tasks/noaacloud/plot_allvars.local.lua create mode 100644 modulefiles/tasks/orion/plot_allvars.local.lua diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua new file mode 100644 index 0000000000..2263141a0a --- /dev/null +++ b/modulefiles/tasks/cheyenne/plot_allvars.local.lua @@ -0,0 +1,5 @@ +unload("python") +prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_ENV", "regional_workflow") diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua new file mode 100644 index 0000000000..74336ca1cf --- /dev/null +++ b/modulefiles/tasks/gaea/plot_allvars.local.lua @@ -0,0 +1,4 @@ +prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_ENV", "regional_workflow") diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua new file mode 100644 index 0000000000..7934169824 --- /dev/null +++ b/modulefiles/tasks/hera/plot_allvars.local.lua @@ -0,0 +1,4 @@ +prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_ENV", "regional_workflow") diff --git a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua new file mode 100644 index 0000000000..ef4f248966 --- /dev/null +++ b/modulefiles/tasks/jet/plot_allvars.local.lua @@ -0,0 +1,4 @@ +prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_ENV", "regional_workflow") diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua new file mode 100644 index 0000000000..602d60842f --- /dev/null +++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua @@ -0,0 +1 @@ +prepend_path("PATH", "/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin") diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua b/modulefiles/tasks/orion/plot_allvars.local.lua new file mode 100644 index 0000000000..5b7b0afc57 --- /dev/null +++ b/modulefiles/tasks/orion/plot_allvars.local.lua @@ -0,0 +1,5 @@ 
+unload("python") +append_path("MODULEPATH","/work/noaa/epic-ps/role-epic-ps/miniconda3/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_ENV", "regional_workflow") From a278ab5a4da6f2dc18b06ddcc14cf4e4a0b93020 Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Mon, 22 May 2023 12:26:32 -0600 Subject: [PATCH 30/38] Update ush/generate_FV3LAM_wflow.py Co-authored-by: Michael Kavulich --- ush/generate_FV3LAM_wflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 0ebedab431..9cbefb69f6 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -823,7 +823,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logging.getLogger().addHandler(fh) - logging.debug("Finished setting up debug file logging in {logfile}") + logging.debug(f"Finished setting up debug file logging in {logfile}") # If there are already multiple handlers, that means # generate_FV3LAM_workflow was called from another function. From 15285be71ae3dfa7d8c2ea4ef463f4fd9b3bd2ab Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 18:49:44 +0000 Subject: [PATCH 31/38] Allow f-strings in log messages. --- .pylintrc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.pylintrc b/.pylintrc index a4bfcd4b2f..0cb488e3d5 100644 --- a/.pylintrc +++ b/.pylintrc @@ -387,7 +387,7 @@ preferred-modules= # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. -logging-format-style=old +logging-format-style=new # Logging modules to check that the string format arguments are in logging # function parameter format. @@ -421,7 +421,8 @@ disable=raw-checker-failed, suppressed-message, useless-suppression, deprecated-pragma, - use-symbolic-message-instead + use-symbolic-message-instead, + logging-fstring-interpolation # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option From 1d47cd63d8502d84edfe4ba428a80a9fc14a4993 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 19:26:41 +0000 Subject: [PATCH 32/38] Find the right solution for exit status and log. --- ush/generate_FV3LAM_wflow.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 9cbefb69f6..9a0ad80762 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -10,6 +10,7 @@ import os import logging from textwrap import dedent +import sys from python_utils import ( log_info, @@ -862,7 +863,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals """ ) ) - raise + sys.exit(1) # pylint: disable=undefined-variable # Note workflow generation completion From ef7514c58a7eec7bc03a8fc832cebf2a2aeaf62c Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 22 May 2023 19:48:02 +0000 Subject: [PATCH 33/38] Ignore since we don't have custom exceptions. 
--- ush/generate_FV3LAM_wflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 9a0ad80762..9bef3e5a42 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -850,7 +850,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals # experiment/workflow. try: expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile) - except: + except: # pylint: disable=bare-except logging.exception( dedent( f""" From 54e97cc3cb77e7f028395833a96b3578e3ec2c80 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 24 May 2023 19:43:42 +0000 Subject: [PATCH 34/38] Address Chan-hoo's comments. --- ush/create_aqm_rc_file.py | 2 +- ush/load_modules_run_task.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index e580a965b5..3a83d5e516 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -136,7 +136,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): [ "-q", "-c", - tmpfile, + tmpfile.name, "-i", AQM_RC_TMPL_FP, "-o", diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 086f24db98..4dee14d35c 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -108,9 +108,9 @@ if [ "${machine}" != "wcoss2" ]; then for the workflow task specified by task_name failed: task_name = \"${task_name}\" BUILD_MOD_FN = \"${BUILD_MOD_FN}\"" - module load set_pythonpath || print_err_msg_exit "\ - Loading the module to set PYTHONPATH for workflow-tools failed." fi +module load set_pythonpath || print_err_msg_exit "\ + Loading the module to set PYTHONPATH for workflow-tools failed." # #----------------------------------------------------------------------- # From 1fdd508548e4c1b925e8f543c65ef3ad9dbe480d Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 24 May 2023 19:51:18 +0000 Subject: [PATCH 35/38] Accidently reverted mkavulich's recent change. --- ush/generate_FV3LAM_wflow.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 9bef3e5a42..5022bfed4c 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -869,14 +869,12 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals # Note workflow generation completion log_info( f""" - ======================================================================== ======================================================================== Experiment generation completed. The experiment directory is: EXPTDIR='{EXPTDIR}' - ======================================================================== ======================================================================== """ ) From 7d8f24f3983c512e49d8eb48c3fe88e0c6d5e98d Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 25 May 2023 18:21:05 +0000 Subject: [PATCH 36/38] Module use should also be done for all platforms. 
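The tmpfile.name fix above matters because set_template expects a filesystem path after -c, not a file object. The following is a small, hypothetical illustration of that distinction; the settings string is made up:

    import tempfile

    settings_str = "some_key: some_value\n"   # hypothetical YAML settings

    with tempfile.NamedTemporaryFile(
        dir="./", mode="w+t", prefix="aqm_rc_settings"
    ) as tmpfile:
        tmpfile.write(settings_str)
        tmpfile.seek(0)
        # tmpfile is a file object; tmpfile.name is the path string that a
        # command-line style interface such as set_template's "-c" option needs.
        print(type(tmpfile), tmpfile.name)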
--- ush/load_modules_run_task.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 4dee14d35c..a3b8aaca5a 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -101,14 +101,16 @@ set -u default_modules_dir="$HOMEdir/modulefiles" machine=$(echo_lowercase $MACHINE) source "${HOMEdir}/etc/lmod-setup.sh" ${machine} +module use "${default_modules_dir}" + if [ "${machine}" != "wcoss2" ]; then - module use "${default_modules_dir}" module load "${BUILD_MOD_FN}" || print_err_msg_exit "\ Loading of platform- and compiler-specific module file (BUILD_MOD_FN) for the workflow task specified by task_name failed: task_name = \"${task_name}\" BUILD_MOD_FN = \"${BUILD_MOD_FN}\"" fi + module load set_pythonpath || print_err_msg_exit "\ Loading the module to set PYTHONPATH for workflow-tools failed." # From 54bb06435e75e3e8d1ba01c33780a3feedc14eca Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Wed, 31 May 2023 14:36:31 +0000 Subject: [PATCH 37/38] Address yaml issue with aqm yaml file. --- ush/create_aqm_rc_file.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 3a83d5e516..02d8d827e5 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -129,7 +129,11 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # #----------------------------------------------------------------------- # - with tempfile.NamedTemporaryFile(dir="./", mode="w+t", prefix="aqm_rc_settings") as tmpfile: + with tempfile.NamedTemporaryFile( + dir="./", + mode="w+t", + prefix="aqm_rc_settings", + suffix=".yaml") as tmpfile: tmpfile.write(settings_str) tmpfile.seek(0) set_template( From 5a074029fb7d2c9a977cd3d5d34c0777aa383cc3 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 8 Jun 2023 18:25:41 +0000 Subject: [PATCH 38/38] Using workflow_tools for workflow instead of regional_workflow --- modulefiles/tasks/jet/run_vx.local.lua | 4 +--- modulefiles/wflow_cheyenne.lua | 2 +- modulefiles/wflow_gaea.lua | 2 +- modulefiles/wflow_hera.lua | 2 +- modulefiles/wflow_jet.lua | 2 +- modulefiles/wflow_linux.lua | 2 +- modulefiles/wflow_macos.lua | 2 +- modulefiles/wflow_odin.lua | 2 +- modulefiles/wflow_orion.lua | 2 +- modulefiles/wflow_singularity.lua | 2 +- 10 files changed, 10 insertions(+), 12 deletions(-) diff --git a/modulefiles/tasks/jet/run_vx.local.lua b/modulefiles/tasks/jet/run_vx.local.lua index 13b6b4b954..750fd7603e 100644 --- a/modulefiles/tasks/jet/run_vx.local.lua +++ b/modulefiles/tasks/jet/run_vx.local.lua @@ -1,3 +1 @@ -append_path("MODULEPATH", "/contrib/anaconda/modulefiles") -load(pathJoin("intel", os.getenv("intel_ver") or "18.0.5.274")) -load(pathJoin("anaconda", os.getenv("anaconda_ver") or "5.3.1")) +load("python_srw") diff --git a/modulefiles/wflow_cheyenne.lua b/modulefiles/wflow_cheyenne.lua index 094bf0918a..9a7a37c0b5 100644 --- a/modulefiles/wflow_cheyenne.lua +++ b/modulefiles/wflow_cheyenne.lua @@ -18,7 +18,7 @@ load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_gaea.lua b/modulefiles/wflow_gaea.lua index 96efa3c31c..f623acd15b 100644 --- a/modulefiles/wflow_gaea.lua +++ b/modulefiles/wflow_gaea.lua @@ -15,6 +15,6 @@ setenv("PROJ_LIB", 
"/lustre/f2/dev/role.epic/contrib/miniconda3/4.12.0/envs/regi if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_hera.lua b/modulefiles/wflow_hera.lua index 3e981921b2..5d6ebeed1d 100644 --- a/modulefiles/wflow_hera.lua +++ b/modulefiles/wflow_hera.lua @@ -13,6 +13,6 @@ load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_jet.lua b/modulefiles/wflow_jet.lua index 3ffcd68414..5f109429dc 100644 --- a/modulefiles/wflow_jet.lua +++ b/modulefiles/wflow_jet.lua @@ -13,6 +13,6 @@ load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_linux.lua b/modulefiles/wflow_linux.lua index 74f7c8edb6..3fb5d1123a 100644 --- a/modulefiles/wflow_linux.lua +++ b/modulefiles/wflow_linux.lua @@ -37,6 +37,6 @@ load("set_pythonpath") -- display conda activation message if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_macos.lua b/modulefiles/wflow_macos.lua index ddd3b60ae6..6ee6022b20 100644 --- a/modulefiles/wflow_macos.lua +++ b/modulefiles/wflow_macos.lua @@ -37,7 +37,7 @@ load("set_pythonpath") -- display conda activation message if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda virtual environment: - > conda activate regional_workflow" + > conda activate workflow_tools" ]===]) end diff --git a/modulefiles/wflow_odin.lua b/modulefiles/wflow_odin.lua index 886d072d06..f042b00894 100644 --- a/modulefiles/wflow_odin.lua +++ b/modulefiles/wflow_odin.lua @@ -30,6 +30,6 @@ if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: > conda config --set changeps1 False - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua index b56ae2a69b..46da63f5d4 100644 --- a/modulefiles/wflow_orion.lua +++ b/modulefiles/wflow_orion.lua @@ -16,7 +16,7 @@ load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate regional_workflow + > conda activate workflow_tools ]===]) end diff --git a/modulefiles/wflow_singularity.lua b/modulefiles/wflow_singularity.lua index 7d097afa28..309c5eac23 100644 --- a/modulefiles/wflow_singularity.lua +++ b/modulefiles/wflow_singularity.lua @@ -10,5 +10,5 @@ append_path("MODULEPATH","/opt/hpc-modules/modulefiles/core") load("miniconda3") if mode() == "load" then - execute{cmd="conda activate regional_workflow", modeA={"load"}} + execute{cmd="conda activate workflow_tools", modeA={"load"}} end