diff --git a/Makefile.am b/Makefile.am index f3c2b7a83a..a1e75367dd 100644 --- a/Makefile.am +++ b/Makefile.am @@ -18,7 +18,7 @@ ACLOCAL_AMFLAGS = -I m4 -SUBDIRS = data src scripts/Rscripts scripts/python scripts/utility +SUBDIRS = data src scripts/Rscripts scripts/python if ENABLE_DEVELOPMENT SUBDIRS += internal/test_util diff --git a/Makefile.in b/Makefile.in index 8236187480..aa3a1d74ff 100644 --- a/Makefile.in +++ b/Makefile.in @@ -158,7 +158,7 @@ ETAGS = etags CTAGS = ctags CSCOPE = cscope DIST_SUBDIRS = data src scripts/Rscripts scripts/python \ - scripts/utility internal/test_util + internal/test_util am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/config.h.in INSTALL \ compile config.guess config.sub depcomp install-sh missing \ ylwrap @@ -350,8 +350,7 @@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ ACLOCAL_AMFLAGS = -I m4 -SUBDIRS = data src scripts/Rscripts scripts/python scripts/utility \ - $(am__append_1) +SUBDIRS = data src scripts/Rscripts scripts/python $(am__append_1) MAINTAINERCLEANFILES = \ Makefile.in \ aclocal.m4 \ diff --git a/configure b/configure index d47929b214..87899cbbeb 100755 --- a/configure +++ b/configure @@ -8874,7 +8874,7 @@ done # Create configured files -ac_config_files="$ac_config_files Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile scripts/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile data/colortables/Makefile data/colortables/NCL_colortables/Makefile data/config/Makefile data/map/Makefile data/map/admin_by_country/Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile data/wrappers/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile src/Makefile src/basic/Makefile src/basic/enum_to_string/Makefile src/basic/vx_cal/Makefile src/basic/vx_config/Makefile src/basic/vx_log/Makefile src/basic/vx_math/Makefile src/basic/vx_util/Makefile src/basic/vx_util_math/Makefile src/libcode/Makefile src/libcode/vx_afm/Makefile src/libcode/vx_analysis_util/Makefile src/libcode/vx_color/Makefile src/libcode/vx_data2d/Makefile src/libcode/vx_data2d_factory/Makefile src/libcode/vx_data2d_grib/Makefile src/libcode/vx_data2d_grib2/Makefile src/libcode/vx_data2d_nc_met/Makefile src/libcode/vx_data2d_nc_pinterp/Makefile src/libcode/vx_data2d_nccf/Makefile src/libcode/vx_geodesy/Makefile src/libcode/vx_gis/Makefile src/libcode/vx_gnomon/Makefile src/libcode/vx_grid/Makefile src/libcode/vx_gsl_prob/Makefile src/libcode/vx_nav/Makefile src/libcode/vx_solar/Makefile src/libcode/vx_nc_obs/Makefile src/libcode/vx_nc_util/Makefile src/libcode/vx_pb_util/Makefile src/libcode/vx_plot_util/Makefile src/libcode/vx_ps/Makefile src/libcode/vx_pxm/Makefile src/libcode/vx_render/Makefile src/libcode/vx_shapedata/Makefile src/libcode/vx_stat_out/Makefile src/libcode/vx_statistics/Makefile src/libcode/vx_time_series/Makefile src/libcode/vx_physics/Makefile src/libcode/vx_series_data/Makefile src/libcode/vx_regrid/Makefile src/libcode/vx_tc_util/Makefile src/libcode/vx_summary/Makefile src/libcode/vx_python3_utils/Makefile src/libcode/vx_data2d_python/Makefile src/libcode/vx_bool_calc/Makefile src/libcode/vx_pointdata_python/Makefile src/libcode/vx_seeps/Makefile src/tools/Makefile src/tools/core/Makefile src/tools/core/ensemble_stat/Makefile src/tools/core/grid_stat/Makefile src/tools/core/mode/Makefile src/tools/core/mode_analysis/Makefile src/tools/core/pcp_combine/Makefile src/tools/core/point_stat/Makefile 
src/tools/core/series_analysis/Makefile src/tools/core/stat_analysis/Makefile src/tools/core/wavelet_stat/Makefile src/tools/other/Makefile src/tools/other/ascii2nc/Makefile src/tools/other/lidar2nc/Makefile src/tools/other/gen_ens_prod/Makefile src/tools/other/gen_vx_mask/Makefile src/tools/other/gis_utils/Makefile src/tools/other/ioda2nc/Makefile src/tools/other/madis2nc/Makefile src/tools/other/mode_graphics/Makefile src/tools/other/modis_regrid/Makefile src/tools/other/pb2nc/Makefile src/tools/other/plot_data_plane/Makefile src/tools/other/plot_point_obs/Makefile src/tools/other/wwmca_tool/Makefile src/tools/other/gsi_tools/Makefile src/tools/other/regrid_data_plane/Makefile src/tools/other/point2grid/Makefile src/tools/other/shift_data_plane/Makefile src/tools/other/mode_time_domain/Makefile src/tools/other/grid_diag/Makefile src/tools/tc_utils/Makefile src/tools/tc_utils/tc_dland/Makefile src/tools/tc_utils/tc_pairs/Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile src/tools/tc_utils/tc_rmw/Makefile" +ac_config_files="$ac_config_files Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile scripts/python/examples/Makefile scripts/python/met/Makefile scripts/python/pyembed/Makefile scripts/python/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile data/colortables/Makefile data/colortables/NCL_colortables/Makefile data/config/Makefile data/map/Makefile data/map/admin_by_country/Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile src/Makefile src/basic/Makefile src/basic/enum_to_string/Makefile src/basic/vx_cal/Makefile src/basic/vx_config/Makefile src/basic/vx_log/Makefile src/basic/vx_math/Makefile src/basic/vx_util/Makefile src/basic/vx_util_math/Makefile src/libcode/Makefile src/libcode/vx_afm/Makefile src/libcode/vx_analysis_util/Makefile src/libcode/vx_color/Makefile src/libcode/vx_data2d/Makefile src/libcode/vx_data2d_factory/Makefile src/libcode/vx_data2d_grib/Makefile src/libcode/vx_data2d_grib2/Makefile src/libcode/vx_data2d_nc_met/Makefile src/libcode/vx_data2d_nc_pinterp/Makefile src/libcode/vx_data2d_nccf/Makefile src/libcode/vx_geodesy/Makefile src/libcode/vx_gis/Makefile src/libcode/vx_gnomon/Makefile src/libcode/vx_grid/Makefile src/libcode/vx_gsl_prob/Makefile src/libcode/vx_nav/Makefile src/libcode/vx_solar/Makefile src/libcode/vx_nc_obs/Makefile src/libcode/vx_nc_util/Makefile src/libcode/vx_pb_util/Makefile src/libcode/vx_plot_util/Makefile src/libcode/vx_ps/Makefile src/libcode/vx_pxm/Makefile src/libcode/vx_render/Makefile src/libcode/vx_shapedata/Makefile src/libcode/vx_stat_out/Makefile src/libcode/vx_statistics/Makefile src/libcode/vx_time_series/Makefile src/libcode/vx_physics/Makefile src/libcode/vx_series_data/Makefile src/libcode/vx_regrid/Makefile src/libcode/vx_tc_util/Makefile src/libcode/vx_summary/Makefile src/libcode/vx_python3_utils/Makefile src/libcode/vx_data2d_python/Makefile src/libcode/vx_bool_calc/Makefile src/libcode/vx_pointdata_python/Makefile src/libcode/vx_seeps/Makefile src/tools/Makefile src/tools/core/Makefile src/tools/core/ensemble_stat/Makefile src/tools/core/grid_stat/Makefile src/tools/core/mode/Makefile src/tools/core/mode_analysis/Makefile src/tools/core/pcp_combine/Makefile src/tools/core/point_stat/Makefile src/tools/core/series_analysis/Makefile src/tools/core/stat_analysis/Makefile 
src/tools/core/wavelet_stat/Makefile src/tools/other/Makefile src/tools/other/ascii2nc/Makefile src/tools/other/lidar2nc/Makefile src/tools/other/gen_ens_prod/Makefile src/tools/other/gen_vx_mask/Makefile src/tools/other/gis_utils/Makefile src/tools/other/ioda2nc/Makefile src/tools/other/madis2nc/Makefile src/tools/other/mode_graphics/Makefile src/tools/other/modis_regrid/Makefile src/tools/other/pb2nc/Makefile src/tools/other/plot_data_plane/Makefile src/tools/other/plot_point_obs/Makefile src/tools/other/wwmca_tool/Makefile src/tools/other/gsi_tools/Makefile src/tools/other/regrid_data_plane/Makefile src/tools/other/point2grid/Makefile src/tools/other/shift_data_plane/Makefile src/tools/other/mode_time_domain/Makefile src/tools/other/grid_diag/Makefile src/tools/tc_utils/Makefile src/tools/tc_utils/tc_dland/Makefile src/tools/tc_utils/tc_pairs/Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile src/tools/tc_utils/tc_rmw/Makefile" if test -n "$MET_DEVELOPMENT"; then @@ -9762,7 +9762,10 @@ do "scripts/Rscripts/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/Rscripts/Makefile" ;; "scripts/Rscripts/include/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/Rscripts/include/Makefile" ;; "scripts/python/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/Makefile" ;; - "scripts/utility/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/utility/Makefile" ;; + "scripts/python/examples/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/examples/Makefile" ;; + "scripts/python/met/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/met/Makefile" ;; + "scripts/python/pyembed/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/pyembed/Makefile" ;; + "scripts/python/utility/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/utility/Makefile" ;; "data/Makefile") CONFIG_FILES="$CONFIG_FILES data/Makefile" ;; "data/climo/Makefile") CONFIG_FILES="$CONFIG_FILES data/climo/Makefile" ;; "data/climo/seeps/Makefile") CONFIG_FILES="$CONFIG_FILES data/climo/seeps/Makefile" ;; @@ -9774,7 +9777,6 @@ do "data/poly/Makefile") CONFIG_FILES="$CONFIG_FILES data/poly/Makefile" ;; "data/poly/HMT_masks/Makefile") CONFIG_FILES="$CONFIG_FILES data/poly/HMT_masks/Makefile" ;; "data/poly/NCEP_masks/Makefile") CONFIG_FILES="$CONFIG_FILES data/poly/NCEP_masks/Makefile" ;; - "data/wrappers/Makefile") CONFIG_FILES="$CONFIG_FILES data/wrappers/Makefile" ;; "data/ps/Makefile") CONFIG_FILES="$CONFIG_FILES data/ps/Makefile" ;; "data/table_files/Makefile") CONFIG_FILES="$CONFIG_FILES data/table_files/Makefile" ;; "data/tc_data/Makefile") CONFIG_FILES="$CONFIG_FILES data/tc_data/Makefile" ;; diff --git a/configure.ac b/configure.ac index 60cc07bcd7..f1561991ee 100644 --- a/configure.ac +++ b/configure.ac @@ -1204,7 +1204,10 @@ AC_CONFIG_FILES([Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile - scripts/utility/Makefile + scripts/python/examples/Makefile + scripts/python/met/Makefile + scripts/python/pyembed/Makefile + scripts/python/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile @@ -1216,7 +1219,6 @@ AC_CONFIG_FILES([Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile - data/wrappers/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile diff --git a/data/Makefile.am b/data/Makefile.am index ac1483ba59..6cebff2c79 100644 --- a/data/Makefile.am +++ b/data/Makefile.am @@ -24,8 +24,7 @@ SUBDIRS = \ poly \ ps \ table_files \ - tc_data \ - 
wrappers + tc_data topdir = $(pkgdatadir) diff --git a/data/Makefile.in b/data/Makefile.in index acf549d2de..2bf69df44f 100644 --- a/data/Makefile.in +++ b/data/Makefile.in @@ -362,8 +362,7 @@ SUBDIRS = \ poly \ ps \ table_files \ - tc_data \ - wrappers + tc_data topdir = $(pkgdatadir) top_DATA = \ diff --git a/data/wrappers/read_tmp_dataplane.py b/data/wrappers/read_tmp_dataplane.py deleted file mode 100644 index 98bbe728d8..0000000000 --- a/data/wrappers/read_tmp_dataplane.py +++ /dev/null @@ -1,37 +0,0 @@ -######################################################################## -# -# Reads temporary file into memory. -# -# usage: /path/to/python read_tmp_dataplane.py dataplane.tmp -# -######################################################################## - -import sys -import numpy as np -import netCDF4 as nc - -met_info = {} -netcdf_filename = sys.argv[1] - -# read NetCDF file -ds = nc.Dataset(netcdf_filename, 'r') -met_data = ds['met_data'][:] -met_attrs = {} - -# grid is defined as a dictionary or string -grid = {} -for attr, attr_val in ds.__dict__.items(): - if 'grid.' in attr: - grid_attr = attr.split('.')[1] - grid[grid_attr] = attr_val - else: - met_attrs[attr] = attr_val - -if grid: - met_attrs['grid'] = grid - -met_attrs['name'] = met_attrs['name_str'] -del met_attrs['name_str'] -met_info['met_data'] = met_data -met_info['attrs'] = met_attrs - diff --git a/data/wrappers/read_tmp_point_nc.py b/data/wrappers/read_tmp_point_nc.py deleted file mode 100644 index 0ef8eefc3a..0000000000 --- a/data/wrappers/read_tmp_point_nc.py +++ /dev/null @@ -1,26 +0,0 @@ -######################################################################## -# -# Reads temporary point obs. file into memory. -# -# usage: /path/to/python read_tmp_point_nc.py tmp_output_filename -# -######################################################################## - -import os -import sys - -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) -from met_point_obs import met_point_obs -from met_point_obs_nc import nc_point_obs - -netcdf_filename = sys.argv[1] - -# read NetCDF file -print('{p} reading{f}'.format(p=met_point_obs.get_prompt(), f=netcdf_filename)) -point_obs_data = nc_point_obs() -point_obs_data.read_data(netcdf_filename) - -met_point_data = point_obs_data.get_point_data() -met_point_data['met_point_data'] = point_obs_data diff --git a/data/wrappers/write_tmp_dataplane.py b/data/wrappers/write_tmp_dataplane.py deleted file mode 100644 index 476d2348c3..0000000000 --- a/data/wrappers/write_tmp_dataplane.py +++ /dev/null @@ -1,75 +0,0 @@ -######################################################################## -# -# Adapted from a script provided by George McCabe -# Adapted by Randy Bullock -# -# usage: /path/to/python write_tmp_dataplane.py \ -# tmp_output_filename .py -# -######################################################################## - -import os -import sys -import importlib.util -import netCDF4 as nc - -print("Python Script:\t" + repr(sys.argv[0])) -print("User Command:\t" + repr(' '.join(sys.argv[2:]))) -print("Temporary File:\t" + repr(sys.argv[1])) - -netcdf_filename = sys.argv[1] -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] - -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - -# append user script dir to system path -pyembed_dir, pyembed_file = 
os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -met_info = {'met_data': met_in.met_data} -if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: - attrs = met_in.met_data.attrs -else: - attrs = met_in.attrs -met_info['attrs'] = attrs - -# determine fill value -try: - fill = met_data.get_fill_value() -except: - fill = -9999. - -# write NetCDF file -ds = nc.Dataset(netcdf_filename, 'w') - -# create dimensions and variable -nx, ny = met_in.met_data.shape -ds.createDimension('x', nx) -ds.createDimension('y', ny) -dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill) -dp[:] = met_in.met_data - -# append attributes -for attr, attr_val in met_info['attrs'].items(): - if attr == 'name': - setattr(ds, 'name_str', attr_val) - elif type(attr_val) == dict: - for key in attr_val: - setattr(ds, attr + '.' + key, attr_val[key]) - else: - setattr(ds, attr, attr_val) - -ds.close() diff --git a/data/wrappers/write_tmp_mpr.py b/data/wrappers/write_tmp_mpr.py deleted file mode 100644 index 3eee0379f5..0000000000 --- a/data/wrappers/write_tmp_mpr.py +++ /dev/null @@ -1,43 +0,0 @@ -######################################################################## -# -# Adapted from a script provided by George McCabe -# Adapted by Randy Bullock -# -# usage: /path/to/python write_tmp_mpr.py \ -# tmp_output_filename .py -# -######################################################################## - -import os -import sys -import importlib.util - -print("Python Script:\t" + repr(sys.argv[0])) -print("User Command:\t" + repr(' '.join(sys.argv[2:]))) -print("Temporary File:\t" + repr(sys.argv[1])) - -tmp_filename = sys.argv[1] -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] - -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -f = open(tmp_filename, 'w') -for line in met_in.mpr_data: - f.write(str(line) + '\n') diff --git a/data/wrappers/write_tmp_point.py b/data/wrappers/write_tmp_point.py deleted file mode 100644 index 916fca5549..0000000000 --- a/data/wrappers/write_tmp_point.py +++ /dev/null @@ -1,43 +0,0 @@ -######################################################################## -# -# Adapted from a script provided by George McCabe -# Adapted by Randy Bullock -# -# usage: /path/to/python write_tmp_point.py \ -# tmp_output_filename .py -# -######################################################################## - -import os -import sys -import importlib.util - -print("Python Script:\t" + repr(sys.argv[0])) -print("User Command:\t" + repr(' '.join(sys.argv[2:]))) -print("Temporary File:\t" + repr(sys.argv[1])) - -tmp_filename = sys.argv[1] 
-pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] - -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -f = open(tmp_filename, 'w') -for line in met_in.point_data: - f.write(str(line) + '\n') diff --git a/data/wrappers/write_tmp_point_nc.py b/data/wrappers/write_tmp_point_nc.py deleted file mode 100644 index 063a2e98cc..0000000000 --- a/data/wrappers/write_tmp_point_nc.py +++ /dev/null @@ -1,55 +0,0 @@ -######################################################################## -# -# Adapted from a script provided by George McCabe -# Adapted by Howard Soh -# -# usage: /path/to/python write_tmp_point_nc.py \ -# tmp_output_filename .py -# -######################################################################## - -import os -import sys -import importlib.util - -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - -from met_point_obs import met_point_obs -from met_point_obs_nc import nc_point_obs - -PROMPT = met_point_obs.get_prompt() -print("{p} Python Script:\t".format(p=PROMPT) + repr(sys.argv[0])) -print("{p} User Command:\t".format(p=PROMPT) + repr(' '.join(sys.argv[2:]))) -print("{p} Temporary File:\t".format(p=PROMPT) + repr(sys.argv[1])) - -tmp_filename = sys.argv[1] -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] - -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -if hasattr(met_in, 'point_obs_data'): - met_in.point_obs_data.save_ncfile(tmp_filename) -else: - if hasattr(met_in.met_point_data, 'point_obs_data'): - met_in.met_point_data['point_obs_data'].save_ncfile(tmp_filename) - else: - tmp_point_obs = nc_point_obs() - tmp_point_obs.put_data(met_in.met_point_data) - tmp_point_obs.save_ncfile(tmp_filename) - -#print('{p} writing {f}'.format(p=PROMPT, f=tmp_filename)) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index c490cc07e3..f028c87326 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -7,118 +7,182 @@ Appendix F Python Embedding Introduction ============ -MET includes the ability to embed Python to a limited degree. Users may use Python scripts and whatever associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. 
In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+. +MET includes the ability to embed Python to a limited degree. Users may use their own Python scripts and any associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+. .. _compiling_python_support: -Compiling Python Support -======================== +Compiling MET for Python Embedding +================================== -In order to use Python embedding, the user's local Python installation must have the C-language Python header files and libraries. Sometimes when Python is installed locally, these header files and libraries are deleted at the end of the installation process, leaving only the binary executable and run-time shared object files. But the Python header files and libraries must be present to compile support in MET for Python embedding. Assuming the requisite Python files are present, and that Python embedding is enabled when building MET (which is done by passing the **--enable-python** option to the **configure** command line), the MET C++ code will use these in the compilation process to link directly to the Python libraries. +In order to use Python embedding, a local Python installation must be available when compiling the MET software with the following requirements: -The local Python installation must also support a minimum set of required packages. The MET build includes some python wrapper scripts to facilitate the passing of data in memory as well as the reading and writing of temporary files. The packages required by those wrapper scripts are **sys, os, argparse, importlib, numpy and netCDF4**. While most of these are standard packages and readily available, numpy and netCDF4 may not be. Users are advised to confirm their availability prior to compiling MET with python embedding support. +1. Python version 3.10.4+ -In addition to the **configure** option mentioned above, three variables, **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**, must also be set for the configuration process. These may either be set as environment variables or as command line options to **configure**. These constants are passed as compiler command line options when building MET to enable the compiler to find the requisite Python executable, header files, and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. Along with the **Python** executable, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: +2. C-language Python header files and libraries -• Set **MET_PYTHON_BIN_EXE** to the full path of the desired python executable. +3. **NumPy** Python package -• On the command line, run "**python3-config --cflags**". Set the value of **MET_PYTHON_CC** to the output of that command. +4. 
**netCDF4** Python package -• Again on the command line, run "**python3-config --ldflags**". Set the value of **MET_PYTHON_LD** to the output of that command. +5. **Pandas** Python package -Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. +6. **Xarray** Python package +Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process, and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** option to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by the Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files when Python embedding is used. -MET_PYTHON_EXE -============== +In addition to using **\-\-enable-python** with **configure** as mentioned above, the following three variables must also be set prior to executing **configure**: **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**. These may either be set as environment variables or as command line options to **configure**. These variables are used when building MET to enable the compiler to find the requisite Python executable, header files, and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. Along with the **Python** executable in the user's local Python installation, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: -When Python embedding support is compiled, MET instantiates the Python interpreter directly. However, for users of highly configurable Conda environments, the Python instance set at compilation time may not be sufficient. Users may want to switch between Conda environments for which different packages are available. MET version 9.0 has been enhanced to address this need. +• Set **MET_PYTHON_BIN_EXE** to the full path of the desired Python executable. -The types of Python embedding supported in MET are described below. In all cases, by default, the compiled Python instance is used to execute the Python script. If the packages that script imports are not available for the compiled Python instance, users will encounter a runtime error. In the event of a runtime error, users are advised to set the **MET_PYTHON_EXE** environment variable and rerun. This environment variable should be set to the full path to the version of Python you would like to use. See an example below. +• On the command line, run "**python3-config \-\-cflags**". Set the value of **MET_PYTHON_CC** to the output of that command. + +• Again on the command line, run "**python3-config \-\-ldflags \-\-embed**". Set the value of **MET_PYTHON_LD** to the output of that command.
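Taken together, a typical setup might look like the following sketch; the Python installation path is illustrative and will differ on each system:

.. code-block:: none
   :caption: Example Environment Setup for Python Embedding

   export MET_PYTHON_BIN_EXE=/usr/local/python3/bin/python3
   export MET_PYTHON_CC="`python3-config --cflags`"
   export MET_PYTHON_LD="`python3-config --ldflags --embed`"
   ./configure --enable-python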
+ +Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. + +If a user attempts to invoke Python embedding with a version of MET that was not compiled with Python, MET will return an ERROR: .. code-block:: none + :caption: MET Errors Without Python Enabled + + ERROR : Met2dDataFileFactory::new_met_2d_data_file() -> Support for Python has not been compiled! + ERROR : To run Python scripts, recompile with the --enable-python option. + + - or - - export MET_PYTHON_EXE=/usr/local/python3/bin/python3 + ERROR : process_point_obs() -> Support for Python has not been compiled! + ERROR : To run Python scripts, recompile with the --enable-python option. + +Controlling Which Python MET Uses When Running +============================================== + +When MET is compiled with Python embedding support, it uses the Python executable from that installation by default whenever Python embedding is invoked. However, for users of highly configurable Python environments, the Python instance set at compilation time may not be sufficient. Users may want to use an alternate Python installation if they need additional packages not available in the Python installation used when compiling MET. In MET versions 9.0+, users have the ability to use a different Python executable when running MET than the version used when compiling MET by setting the environment variable **MET_PYTHON_EXE**. + +If a user's Python script requires packages that are not available in the Python installation used when compiling the MET software, they will encounter a runtime error when using MET. In this instance, the user will need to point MET to a different Python installation that provides the required packages for their script. It is the responsibility of the user to manage this Python installation, and one popular approach is to use a custom Anaconda (Conda) Python environment. Once the Python installation meeting the user's requirements is available, the user can force MET to use it by setting the **MET_PYTHON_EXE** environment variable to the full path of the Python executable in that installation. For example: + +.. code-block:: none + :caption: Setting MET_PYTHON_EXE -Setting this environment variable triggers slightly different processing logic in MET. Rather than executing the user-specified script with compiled Python instance directly, MET does the following: + export MET_PYTHON_EXE=/usr/local/python3/bin/python3 + +Setting this environment variable triggers slightly different processing logic in MET than when MET uses the Python installation it was compiled with. In the default case, Python is called directly and data are passed in memory from Python to the MET tools. When the user sets **MET_PYTHON_EXE**, MET does the following: 1. Wrap the user's Python script and arguments with a wrapper script (write_tmp_mpr.py, write_tmp_point.py, or write_tmp_dataplane.py) and specify the name of a temporary file to be written. 2. Use a system call to the **MET_PYTHON_EXE** Python instance to execute these commands and write the resulting data objects to a temporary ASCII or NetCDF file. -3. Use the compiled Python instance to run a wrapper script (read_tmp_ascii.py or read_tmp_dataplane.py) to read data from that temporary file. +3. Use the Python instance that MET was compiled with to run a wrapper script (read_tmp_ascii.py or read_tmp_dataplane.py) to read data from that temporary file.
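For example, the Plot-Data-Plane command shown later in this appendix can be run through an alternate Python environment simply by setting the variable before invoking the tool; the Conda environment path below is hypothetical:

.. code-block:: none
   :caption: Running a MET Tool with MET_PYTHON_EXE Set

   export MET_PYTHON_EXE=/path/to/conda/envs/my-env/bin/python3
   plot_data_plane PYTHON_NUMPY fcst.ps \
   'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \
   -title "Python enabled plot_data_plane"

The temporary file exchange described in the steps above happens automatically; the command line itself is unchanged.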
-With this approach, users should be able to execute Python scripts in their own custom environments. +With this approach, users are able to execute Python scripts using their own custom Python installations. -.. _pyembed-2d-data: +.. _pyembed-data-structures: -Python Embedding for 2D data -============================ +Data Structures Supported by Python Embedding +============================================= -We now describe how to write Python scripts so that the MET tools may extract 2D gridded data fields from them. Currently, MET offers two ways to interact with Python scripts: by using NumPy N-dimensional arrays (ndarrays) or by using Xarray DataArrays. The interface to be used (NumPy or Xarray) is specified on the command line (more on this later). The user's scripts can use any Python libraries that are supported by the local Python installation, or any personal or institutional libraries or code that are desired in order to implement the Python script, so long as the data has been loaded into either a NumPy ndarray or an Xarray DataArray by the end of the script. This offers advantages when using data file formats that MET does not directly support. If there is Python code to read the data format, the user can use those tools to read the data, and then copy the data into a NumPy ndarray or an Xarray DataArray. MET can then ingest the data via the Python script. Note that whether a NumPy ndarray or an Xarray DataArray is used, the data should be stored as double precision floating point numbers. Using different data types, such as integers or single precision floating point numbers, will lead to unexpected results in MET. +Python embedding with MET tools offers support for three different types of data structures: -**Using NumPy N-dimensional Arrays** +1. Two-dimensional (2D) gridded dataplanes -The data must be loaded into a 2D NumPy ndarray named **met_data**. In addition there must be a Python dictionary named **attrs** which contains metadata such as timestamps, grid projection and other information. Here is an example **attrs** dictionary: +2. Point data conforming to the :ref:`MET 11-column format` -.. code-block:: none +3. Matched-pair data conforming to the :ref:`MET MPR Line Type` - attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': 'Foo', - 'long_name': 'FooBar', - 'level': 'Surface', - 'units': 'None', - - # Define 'grid' as a string or a dictionary - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - 'name': 'FooGrid', - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - 'lat_pin': 12.19, - 'lon_pin': -135.459, - 'x_pin': 0.0, - 'y_pin': 0.0, - 'lon_orient': -95.0, - 'd_km': 40.635, - 'r_km': 6371.2, - 'nx': 185, - 'ny': 129, - } - - } - -In the **attrs** dictionary, valid time, initialization time, lead time and accumulation time (if any) must be indicated by strings. Valid and initialization times must be given in YYYYMMDD[_HH[MMSS]] format, and lead and accumulation times must be given in HH[MMSS] format, where the square brackets indicate optional elements. The dictionary must also include strings for the name, long_name, level, and units to describe the data. The rest of the **attrs** dictionary gives the grid size and projection information in the same format that is used in the netCDF files written out by the MET tools. Those entries are also listed below. Note that the **grid** entry in the **attrs** dictionary can either be defined as a string or as a dictionary itself. 
- -If specified as a string, **grid** can be defined as follows: - -• As a named grid: +Details for each of these data structures are provided below. + +.. note:: + + All sample commands and directories listed below are relative to the top level of the MET source code directory. + +.. _pyembed-2d-data: + +Python Embedding for 2D Gridded Dataplanes +------------------------------------------ + +Currently, MET supports two different types of Python objects for two-dimensional gridded dataplanes: NumPy N-dimensional arrays (ndarrays) and Xarray DataArrays. The keyword **PYTHON_NUMPY** is used on the command line when using ndarrays, and **PYTHON_XARRAY** when using Xarray DataArrays. Example commands are included at the end of this section. + +Python Script Requirements for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +1. The data must be stored in a variable with the name **met_data** + +2. The **met_data** variable must be of type **Xarray DataArray** or **NumPy N-D Array** + +3. The data inside the **met_data** variable must be **double precision floating point** type + +4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes` + +.. _pyembed-2d-attrs: + +Required Attributes for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The **attrs** dictionary must contain the following information: + +.. list-table:: 2D Dataplane Attributes + :widths: 5 5 10 + :header-rows: 1 + + * - key + - description + - data type/format + * - valid + - valid time + - string (YYYYMMDD_HHMMSS) + * - init + - initialization time + - string (YYYYMMDD_HHMMSS) + * - lead + - forecast lead + - string (HHMMSS) + * - accum + - accumulation interval + - string (HHMMSS) + * - name + - variable name + - string + * - long_name + - variable long name + - string + * - level + - variable level + - string + * - units + - variable units + - string + * - grid + - grid information + - string or dict + +.. note:: + + Often times Xarray DataArray objects come with their own set of attributes available as a property. To avoid conflict with the required attributes + for MET, it is advised to strip these attributes and rely on the **attrs** dictionary defined in your script. + +The grid entry in the **attrs** dictionary must contain the grid size and projection information in the same format that is used in the netCDF files written out by the MET tools. The value of this item in the dictionary can either be a string, or another dictionary. Examples of the **grid** entry defined as a string are: + +• Using a named grid supported by MET: .. code-block:: none + :caption: Named Grid - 'grid': 'G212' + 'grid': 'G212' • As a grid specification string, as described in :ref:`appendixB`: .. code-block:: none + :caption: Grid Specification String - 'grid': 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N' + 'grid': 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N' • As the path to an existing gridded data file: .. code-block:: none + :caption: Grid From File - 'grid': '/path/to/sample_data.grib' + 'grid': '/path/to/sample_data.grib' -When specified as a dictionary, the contents of the **grid** dictionary vary based on the grid **type** string. The entries for the supported grid types are described below: +When specified as a dictionary, the contents of the **grid** entry vary based upon the grid **type**. 
The required elements for supported grid types are: • **Lambert Conformal** grid dictionary entries: @@ -188,103 +252,339 @@ When specified as a dictionary, the contents of the **grid** dictionary vary bas Additional information about supported grids can be found in :ref:`appendixB`. -**Using Xarray DataArrays** +Finally, an example **attrs** dictionary is shown below: -To use Xarray DataArrays, a similar procedure to the NumPy case is followed. The Xarray DataArray can be represented as a NumPy N-dimensional array (ndarray) via the **values** property of the DataArray, and an **attrs** property that contains a dictionary of attributes. The user must name the Xarray DataArray to be **met_data**. When one of the MET tools runs the Python script, it will look for an Xarray DataArray named **met_data**, and will retrieve the data and metadata from the **values** and **attrs** properties, respectively, of the Xarray DataArray. The Xarray DataArray **attrs** dictionary is populated in the same way as for the NumPy interface (please see :ref:`pyembed-2d-data` for requirements of each entry in the **attrs** dictionary). The **values** NumPy ndarray property of the Xarray DataArray is also populated in the same way as the NumPy case. +.. code-block:: none + :caption: Sample Attrs Dictionary + + attrs = { + + 'valid': '20050807_120000', + 'init': '20050807_000000', + 'lead': '120000', + 'accum': '120000', + + 'name': 'Foo', + 'long_name': 'FooBar', + 'level': 'Surface', + 'units': 'None', + + # Define 'grid' as a string or a dictionary + + 'grid': { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + 'name': 'FooGrid', + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + 'lat_pin': 12.19, + 'lon_pin': -135.459, + 'x_pin': 0.0, + 'y_pin': 0.0, + 'lon_orient': -95.0, + 'd_km': 40.635, + 'r_km': 6371.2, + 'nx': 185, + 'ny': 129, + } + } + +Running Python Embedding for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On the command line for any of the MET tools which will be obtaining its data from a Python script rather than directly from a data file, the user should specify either **PYTHON_NUMPY** or **PYTHON_XARRAY** wherever a (forecast or observation) data file would normally be given. Then in the **name** entry of the config file dictionaries for the forecast or observation data (typically used to specify the field name from the input data file), the user should list the **full path** to the Python script to be run followed by any command line arguments for that script. Note that for tools like MODE that take two data files, it is entirely possible to use the **PYTHON_NUMPY** for one file and the **PYTHON_XARRAY** for the other. + +Listed below is an example of running the Plot-Data-Plane tool to call a Python script for data that is included with the MET release tarball. Assuming the MET executables are in your path, this example may be run from the top-level MET source code directory: -.. note:: - Currently, MET does not support Xarray Dataset structures. If you have a Dataset in Xarray, you can create a DataArray of a single variable using: +.. code-block:: none + :caption: plot_data_plane Python Embedding - met_data = xr.DataArray(ds.varname,attrs=ds.attrs) + plot_data_plane PYTHON_NUMPY fcst.ps \ + 'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \ + -title "Python enabled plot_data_plane" + +The first argument for the Plot-Data-Plane tool is the gridded data file to be read. 
When calling a Python script that has a two-dimensional gridded dataplane stored in a NumPy N-D array object, set this to the constant string **PYTHON_NUMPY**. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. When calling a Python script, set **name** to the full path of the Python script to be run along with any command line arguments for that script. Lastly, the **-title** option is used to add a title to the plot. Note that any print statements included in the Python script will be printed to the screen. The above example results in the following log messages: - | ds = your Dataset name | varname = variable name in the Dataset you'd like to use in MET +.. code-block:: none + + DEBUG 1: Opening data file: PYTHON_NUMPY + Input File: 'data/python/fcst.txt' + Data Name : 'FCST' + Data Shape: (129, 185) + Data Type: dtype('float64') + Attributes: {'name': 'FCST', 'long_name': 'FCST_word', + 'level': 'Surface', 'units': 'None', + 'init': '20050807_000000', 'valid': '20050807_120000', + 'lead': '120000', 'accum': '120000' + 'grid': {...} } + DEBUG 1: Creating postscript file: fcst.ps -__________________ +Special Case for Ensemble-Stat, Series-Analysis, and MTD +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -It remains to discuss command lines and config files. Two methods for specifying the Python command and input file name are supported. +The Ensemble-Stat, Series-Analysis, MTD, and Gen-Ens-Prod tools all have the ability to read multiple input files. Because of this feature, a different approach to Python embedding is required. A typical use of these tools is to provide a list of files on the command line. For example: -**Python Embedding Option 1:** +.. code-block:: + :caption: Gen-Ens-Prod Command Line -On the command line for any of the MET tools which will be obtaining its data from a Python script rather than directly from a data file, the user should specify either PYTHON_NUMPY or PYTHON_XARRAY wherever a (forecast or observation) data file name would normally be given. Then in the **name** entry of the config file dictionaries for the forecast or observation data, the user should list the Python script to be run followed by any command line arguments for that script. Note that for tools like MODE that take two data files, it would be entirely possible to use the NumPy interface for one file and the Xarray interface for the other. + gen_ens_prod ens1.nc ens2.nc ens3.nc ens4.nc -out ens_prod.nc -config GenEnsProd_config -___________________ +In this case, a user is passing 4 ensemble members to Gen-Ens-Prod to be evaluated, and each member is in a separate file. If a user wishes to use Python embedding to process the ensemble input files, then the exact same command is used; however, special modifications inside the GenEnsProd_config file are needed. In the config file dictionary, the user must set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate the Python embedding for these tools. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, the user must list the **full path** to the Python script to be run. However, in the Python command, replace the name of the input gridded data file passed to the Python script with the constant string **MET_PYTHON_INPUT_ARG**.
When looping over all of the input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the input file currently being processed and, optionally, any command line arguments for the Python script. Here is what this looks like in the GenEnsProd_config file for the above example: -Listed below is an example of running the Plot-Data-Plane tool to call a Python script for data that is included with the MET release tarball. Assuming the MET executables are in your path, this example may be run from the top-level MET source code directory. +.. code-block:: + :caption: Gen-Ens-Prod MET_PYTHON_INPUT_ARG Config .. code-block:: none + file_type = PYTHON_NUMPY; + field = [ { name = "gen_ens_prod_pyembed.py MET_PYTHON_INPUT_ARG"; } ]; - plot_data_plane PYTHON_NUMPY fcst.ps \ - 'name="scripts/python/read_ascii_numpy.py data/python/fcst.txt FCST";' \ - -title "Python enabled plot_data_plane" - -The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling a NumPy Python script, set this to the constant string PYTHON_NUMPY. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. When calling a Python script, set **name** to the Python script to be run along with command line arguments. Lastly, the **-title** option is used to add a title to the plot. Note that any print statements included in the Python script will be printed to the screen. The above example results in the following log messages. +In the event the user requires command line arguments to their Python script, they must be included alongside the file names separated by a delimiter. For example, the above Gen-Ens-Prod command with command line arguments for Python would look like: .. code-block:: none - - DEBUG 1: Opening data file: PYTHON_NUMPY - Input File: 'data/python/fcst.txt' - Data Name : 'FCST' - Data Shape: (129, 185) - Data Type: dtype('float64') - Attributes: {'name': 'FCST', 'long_name': 'FCST_word', - 'level': 'Surface', 'units': 'None', - 'init': '20050807_000000', 'valid': '20050807_120000', - 'lead': '120000', 'accum': '120000' - 'grid': {...} } - DEBUG 1: Creating postscript file: fcst.ps +.. code-block:: + :caption: Gen-Ens-Prod Command Line with Python Args + + gen_ens_prod ens1.nc,arg1,arg2 ens2.nc,arg1,arg2 ens3.nc,arg1,arg2 ens4.nc,arg1,arg2 \ + -out ens_prod.nc -config GenEnsProd_config +In this case, the user's Python script will receive "ens1.nc,arg1,arg2" as a single command line argument for each execution of the Python script (i.e., once per file). The user must parse this argument inside their Python script to obtain **arg1** and **arg2** as separate arguments. The list of input files and, optionally, any command line arguments can be written to a single file called **file_list** that is substituted for the file names and command line arguments. For example: -**Python Embedding Option 2 using MET_PYTHON_INPUT_ARG:** +.. code-block:: + :caption: Gen-Ens-Prod File List -The second option was added to support the use of Python embedding in tools which read multiple input files. Option 1 reads a single field of data from a single source, whereas tools like Ensemble-Stat, Series-Analysis, and MTD read data from multiple input files. While option 2 can be used in any of the MET tools, it is required for Python embedding in Ensemble-Stat, Series-Analysis, and MTD.
+ echo "ens1.nc,arg1,arg2 ens2.nc,arg1,arg2 ens3.nc,arg1,arg2 ens4.nc,arg1,arg2" > file_list + gen_ens_prod file_list -out ens_prod.nc -config GenEnsProd_config -On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in PYTHON_NUMPY or PYTHON_XARRAY on the command line. In the config file dictionary set the **file_type** entry to either PYTHON_NUMPY or PYTHON_XARRAY to activate the Python embedding logic. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string MET_PYTHON_INPUT_ARG. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the option 2 logic instead. +Finally, the above tools do not require data files to be present on a local disk. If the user wishes, their Python script can obtain data from other sources based only upon the command line arguments to their Python script. For example: -The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set MET_PYTHON_INPUT_ARG to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. +.. code-block:: + :caption: Gen-Ens-Prod Python Args Only + + gen_ens_prod 20230101,0 20230102,0 20230103,0 -out ens_prod.nc -config GenEnsProd_config + +In the above command, each of the arguments "20230101,0", "20230102,0", and "20230103,0" is provided to the user's Python script in separate calls. Then, inside the Python script, these arguments are used to construct a filename or a query to a data server or some other mechanism that returns the desired data, which the script formats the way MET expects before handing it back to Gen-Ens-Prod. +Examples of Python Embedding for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +**Grid-Stat with Python embedding for forecast and observations** .. code-block:: none + :caption: GridStat Command with Dual Python Embedding + + grid_stat 'PYTHON_NUMPY' 'PYTHON_NUMPY' GridStat_config -outdir /path/to/output ..
code-block:: none + :caption: GridStat Config with Dual Python Embedding + + fcst = { + field = [ + { + name = "/path/to/fcst/python/script.py python_arg1 python_arg2"; + } + ]; + } + + obs = { + field = [ + { + name = "/path/to/obs/python/script.py python_arg1 python_arg2"; + } + ]; + } .. _pyembed-point-obs-data: Python Embedding for Point Observations -======================================= - -The ASCII2NC tool supports the "-format python" option. With this option, point observations may be passed as input. An example of this is shown below: +--------------------------------------- + +MET also supports point observation data supplied in the :ref:`MET 11-column format`. + +Python Script Requirements for Point Observations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +1. The data must be stored in a variable with the name **point_data** + +2. The **point_data** variable must be a Python list representation of a NumPy N-D Array created from a Pandas DataFrame + +3. The **point_data** variable must have data in each of the 11 columns required for the MET tools even if it is NA + +To provide the data that MET expects for point observations, the user is encouraged when designing their Python script to consider how to map their observations into the MET 11-column format. Then, the user can populate their observations into a Pandas DataFrame with the following column names and dtypes: + +.. list-table:: Point Observation DataFrame Columns and Dtypes + :widths: 5 5 10 + :header-rows: 1 + + * - column name + - data type (dtype) + - description + * - typ + - string + - Message Type + * - sid + - string + - Station ID + * - vld + - string + - Valid Time (YYYYMMDD_HHMMSS) + * - lat + - numeric + - Latitude (Degrees North) + * - lon + - numeric + - Longitude (Degrees East) + * - elv + - numeric + - Elevation (MSL) + * - var + - string + - Variable name (or GRIB code) + * - lvl + - numeric + - Level + * - hgt + - numeric + - Height (MSL or AGL) + * - qc + - string + - QC string + * - obs + - numeric + - Observation Value + +To create the variable for MET, use the **.values** property of the Pandas DataFrame and the **.tolist()** method of the NumPy N-D Array. For example: + +.. code-block:: Python + :caption: Convert Pandas DataFrame to MET variable + + # Pandas DataFrame + my_dataframe = pd.DataFrame() + + # Convert to MET variable + point_data = my_dataframe.values.tolist() + +Running Python Embedding for Point Observations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools support Python embedding for point observations. Python embedding for these tools can be invoked directly on the command line by replacing the input MET NetCDF point observation file name with the **full path** to the Python script and any arguments. The Python command must begin with the prefix **PYTHON_NUMPY=**. The full command should be enclosed in quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below for Plot-Point-Obs: .. code-block:: none + :caption: plot_point_obs with Python Embedding - ascii2nc -format python \ - "MET_BASE/python/read_ascii_point.py sample_ascii_obs.txt" \ - sample_ascii_obs_python.nc + plot_point_obs \ + "PYTHON_NUMPY=scripts/python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ + output_image.ps -The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process point observations. 
They support Python embedding of point observations directly on the command line by replacing the input MET NetCDF point observation file name with the Python command to be run. The Python command must begin with the prefix 'PYTHON_NUMPY=' and be followed by the path to the User's Python script and any arguments. The full command should be enclosed in single quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below: +The ASCII2NC tool also supports Python embedding, however invoking it varies slightly from other MET tools. For ASCII2NC, Python embedding is used by providing the "-format python" option on the command line. With this option, point observations may be passed as input. An example of this is shown below: .. code-block:: none + :caption: ascii2nc with Python Embedding + + ascii2nc -format python \ + "scripts/python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ + sample_ascii_obs_python.nc - plot_point_obs \ - "PYTHON_NUMPY=MET_BASE/python/read_ascii_point.py sample_ascii_obs.txt" \ - output_image.ps +Both of the above examples use the **read_ascii_point.py** example script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** example script can be found in: -Both of the above examples use the **read_ascii_point.py** sample script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** sample script can be found in: +• MET installation directory in *scripts/python/examples*. -• MET installation directory in *MET_BASE/python*. +• `MET GitHub repository `_ in *scripts/python/examples*. -• `MET GitHub repository `_ in *met/scripts/python*. +Examples of Python Embedding for Point Observations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +**Point-Stat with Python embedding for forecast and observations** + +.. code-block:: none + :caption: PointStat Command with Dual Python Embedding + + point_stat 'PYTHON_NUMPY' 'PYTHON_NUMPY' PointStat_config -outdir /path/to/output + +.. code-block:: none + :caption: PointStat Config with Dual Python Embedding + + fcst = { + field = [ + { + name = "/path/to/fcst/python/script.py python_arg1 python_arg2"; + } + ]; + } + + obs = { + field = [ + { + name = "/path/to/obs/python/script.py python_arg1 python_arg2"; + } + ]; + } .. _pyembed-mpr-data: -Python Embedding for MPR data -============================= +Python Embedding for MPR Data +----------------------------- + +The MET Stat-Analysis tool also supports Python embedding. By using the command line option **-lookin python**, Stat-Analysis can read matched pair (MPR) data formatted in the MET MPR line-type format via Python. + +.. note:: + + This functionality assumes you are passing only the MPR line type information, and not other statistical line types. Sometimes users configure MET tools to write the MPR line type to the STAT file (along with all other line types). The example below will not work for those files, but rather only files from MET tools containing just the MPR line type information, or optionally, data in another format that the user adapts to the MPR line type format. 
 
 .. _pyembed-mpr-data:
 
-Python Embedding for MPR data
-=============================
+Python Embedding for MPR Data
+-----------------------------
+
+The MET Stat-Analysis tool also supports Python embedding. By using the command line option **-lookin python**, Stat-Analysis can read matched pair (MPR) data formatted in the MET MPR line-type format via Python.
+
+.. note::
+
+   This functionality assumes you are passing only the MPR line type information, and not other statistical line types. Sometimes users configure MET tools to write the MPR line type to the STAT file (along with all other line types). The example below will not work for those files; it works only for files from MET tools containing just the MPR line type information or, optionally, for data in another format that the user adapts to the MPR line type format.
+
+Python Script Requirements for MPR Data
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+1. The data must be stored in a variable with the name **mpr_data**
+
+2. The **mpr_data** variable must be a Python list representation of a NumPy N-D Array created from a Pandas DataFrame
+
+3. The **mpr_data** variable must have data in **exactly** 36 columns, corresponding to the combination of the :ref:`common STAT output` and the :ref:`MPR line type output`.
+
+If a user does not have an existing MPR line type file created by the MET tools, they will need to map their data into the 36 columns expected by Stat-Analysis for the MPR line type data. If a user already has MPR line type files, the most direct approach is to model their Python script after the example **read_ascii_mpr.py** script. Sample code is included here for convenience:
+
+.. code-block:: Python
+   :caption: Reading MPR line types with Pandas
+
+   # Open the MPR line type file, skipping the header row and the
+   # leading version column to keep the 36 MPR data columns
+   mpr_dataframe = pd.read_csv(input_mpr_file,
+                               header=None,
+                               delim_whitespace=True,
+                               keep_default_na=False,
+                               skiprows=1,
+                               usecols=range(1,37),
+                               dtype=str)
+
+   # Convert to the variable MET expects
+   mpr_data = mpr_dataframe.values.tolist()
+
+Running Python Embedding for MPR Data
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-The Stat-Analysis tool supports the "-lookin python" option. With this option, matched pair (MPR) data may be passed as input. An example of this is provided in :numref:`StA-pyembed`. That example uses the **read_ascii_mpr.py** sample script which is included with the MET code. It reads MPR data and stores it in a Pandas dataframe to be read by the Stat-Analysis tool with Python.
+Stat-Analysis can be run using the **-lookin python** command line option:
+
+.. code-block:: none
+   :caption: Stat-Analysis with Python Embedding of MPR Data
+
+   stat_analysis \
+      -lookin python scripts/python/examples/read_ascii_mpr.py point_stat_mpr.txt \
+      -job aggregate_stat -line_type MPR -out_line_type CNT \
+      -by FCST_VAR,FCST_LEV
+
+In this example, rather than passing the MPR output lines from Point-Stat directly into Stat-Analysis (which is the typical approach), the **read_ascii_mpr.py** Python embedding script reads that file and passes the data to Stat-Analysis. The aggregate_stat job is defined on the command line and CNT statistics are derived from the MPR input data. Separate CNT statistics are computed for each unique combination of FCST_VAR and FCST_LEV present in the input. The **read_ascii_mpr.py** example script can be found in:
 
-• MET installation directory in *MET_BASE/python*.
+• MET installation directory in *scripts/python/examples*.
 
-• `MET GitHub repository <https://github.com/dtcenter/MET>`_ in *met/scripts/python*.
+• `MET GitHub repository <https://github.com/dtcenter/MET>`_ in *MET/scripts/python/examples*.
+
+MET Python Package
+==================
+
+MET comes with a Python package that provides core functionality for the Python embedding capability. In rare cases, advanced users may find the classes and functions included with this Python package useful.
+
+To utilize the MET Python package **standalone**, when NOT using it with Python embedding, users must add the following to their **PYTHONPATH** environment variable:
+
+.. code-block:: none
+   :caption: MET Python Module PYTHONPATH
+
+   export PYTHONPATH={MET_INSTALL_DIR}/share/met/python
+
+where {MET_INSTALL_DIR} is the top-level directory where MET is installed, for example **/usr/local/met**.
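+
+Once **PYTHONPATH** is set, the package can be imported directly in a standalone Python session. The short sketch below is illustrative only; it uses the **dataplane** helper that the **read_ascii_numpy.py** example script imports, and the input file name is a placeholder:
+
+.. code-block:: Python
+   :caption: Standalone use of the MET Python package (illustrative sketch)
+
+   # Requires PYTHONPATH to include {MET_INSTALL_DIR}/share/met/python
+   from met.dataplane import dataplane
+
+   # Read an n by m text file into a 2D NumPy array, as the
+   # read_ascii_numpy.py example script does ("fcst.txt" is a placeholder)
+   met_data = dataplane.read_2d_text_input("fcst.txt")
+   print(met_data.shape)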
diff --git a/docs/Users_Guide/installation.rst b/docs/Users_Guide/installation.rst
index 9db4d6993f..534c5dece4 100644
--- a/docs/Users_Guide/installation.rst
+++ b/docs/Users_Guide/installation.rst
@@ -122,7 +122,7 @@ MET Directory Structure
 
 The top-level MET directory consists of Makefiles, configuration files, and several subdirectories. The top-level Makefile and configuration files control how the entire toolkit is built. Instructions for using these files to build MET can be found in :numref:`Install_Building-the-MET`.
 
-When MET has been successfully built and installed, the installation directory contains two subdirectories. The *bin/* directory contains executables for each module of MET as well as several plotting utilities. The *share/met/* directory contains many subdirectories with data required at runtime and a subdirectory of sample R scripts utilities. The *colortables/*, *map/*, and *ps/* subdirectories contain data used in creating PostScript plots for several MET tools. The *poly/* subdirectory contains predefined lat/lon polyline regions for use in selecting regions over which to verify. The polylines defined correspond to verification regions used by NCEP as described in :numref:`Appendix B, Section %s `. The *config/* directory contains default configuration files for the MET tools. The *python/* subdirectory contains sample scripts used in Python embedding (:numref:`Appendix F, Section %s `). The *table_files/* and *tc_data/* subdirectories contain GRIB table definitions and tropical cyclone data, respectively. The *Rscripts/* subdirectory contains a handful of plotting graphic utilities for MET-TC. These are the same Rscripts that reside under the top-level MET *scripts/Rscripts* directory, other than it is the installed location. The *wrappers/* subdirectory contains code used in Python embedding (:numref:`Appendix F, Section %s `).
+When MET has been successfully built and installed, the installation directory contains two subdirectories. The *bin/* directory contains executables for each module of MET as well as several plotting utilities. The *share/met/* directory contains many subdirectories with data required at runtime and a subdirectory of sample R script utilities. The *colortables/*, *map/*, and *ps/* subdirectories contain data used in creating PostScript plots for several MET tools. The *poly/* subdirectory contains predefined lat/lon polyline regions for use in selecting regions over which to verify. The polylines defined correspond to verification regions used by NCEP as described in :numref:`Appendix B, Section %s `. The *config/* directory contains default configuration files for the MET tools. The *python/* subdirectory contains Python scripts. The *python/examples* subdirectory contains sample scripts used in Python embedding (:numref:`Appendix F, Section %s `). The *python/pyembed/* subdirectory contains code used in Python embedding (:numref:`Appendix F, Section %s `). The *table_files/* and *tc_data/* subdirectories contain GRIB table definitions and tropical cyclone data, respectively. The *Rscripts/* subdirectory contains a handful of plotting utilities for MET-TC. These are the same Rscripts that reside under the top-level MET *scripts/Rscripts* directory, except that this is the installed location. The *data/* directory contains several configuration and static data files used by MET. The *sample_fcst/* and *sample_obs/* subdirectories contain sample data used by the test scripts provided in the *scripts/* directory.
@@ -209,7 +209,7 @@ The following environment variables should also be set:
 
       MET_PYTHON_CC='-I/usr/include/python3.6'
      MET_PYTHON_LD='-L/usr/lib/python3.6/config-x86_64-linux-gnu -lpython3.6m'
 
-  Note that this version of Python must include support for a minimum set of required pacakges. For more information about Python support in MET, including the list of required packages, please refer to :numref:`Appendix F, Section %s `.
+  Note that this version of Python must include support for a minimum set of required packages. For more information about Python support in MET, including the list of required packages, please refer to :numref:`Appendix F, Section %s `.
 
 * If compiling MODIS-Regrid and/or lidar2nc, set $MET_HDF to point to the main HDF4 directory, or set $MET_HDFINC to point to the directory with the HDF4 include files and set $MET_HDFLIB to point to the directory with the HDF4 library files. Also, set $MET_HDFEOS to point to the main HDF EOS directory, or set $MET_HDFEOSINC to point to the directory with the HDF EOS include files and set $MET_HDFEOSLIB to point to the directory with the HDF EOS library files.
 
diff --git a/docs/Users_Guide/plotting.rst b/docs/Users_Guide/plotting.rst
index 1db3b4be91..1ac44e2f7e 100644
--- a/docs/Users_Guide/plotting.rst
+++ b/docs/Users_Guide/plotting.rst
@@ -71,7 +71,7 @@ An equivalent command using python embedding for point observations is shown bel
 
 .. code-block:: none
 
-  plot_point_obs 'PYTHON_NUMPY=MET_BASE/python/read_met_point_obs.py sample_pb.nc' sample_data.ps
+  plot_point_obs 'PYTHON_NUMPY=MET_BASE/python/examples/read_met_point_obs.py sample_pb.nc' sample_data.ps
 
 Please see section :numref:`pyembed-point-obs-data` for more details about Python embedding in MET.
 
diff --git a/docs/Users_Guide/reformat_point.rst b/docs/Users_Guide/reformat_point.rst
index 1cd9b4705d..809639c249 100644
--- a/docs/Users_Guide/reformat_point.rst
+++ b/docs/Users_Guide/reformat_point.rst
@@ -1042,7 +1042,7 @@ Required arguments for point2grid
 
 1. The **input_filename** argument indicates the name of the input file to be processed. The input can be a MET NetCDF point observation file generated by other MET tools or a NetCDF AOD dataset from GOES16/17. Python embedding for point observations is also supported, as described in :numref:`pyembed-point-obs-data`.
 
-The MET point observation NetCDF file name as **input_filename** argument is equivalent with "PYTHON_NUMPY=MET_BASE/python/read_met_point_obs.py netcdf_file name'.
+Supplying the MET point observation NetCDF file name as the **input_filename** argument is equivalent to "PYTHON_NUMPY=MET_BASE/python/examples/read_met_point_obs.py netcdf_filename".
 
 2. The **to_grid** argument defines the output grid as: (1) a named grid, (2) the path to a gridded data file, or (3) an explicit grid specification string.
 
@@ -1100,7 +1100,7 @@ Listed below is an example of processing the same set of observations but using
 
 .. code-block:: none
 
     point2grid \
-      'PYTHON_NUMPY=MET_BASE/python/read_met_point_obs.py ascii2nc_edr_hourly.20130827.nc' \
+      'PYTHON_NUMPY=MET_BASE/python/examples/read_met_point_obs.py ascii2nc_edr_hourly.20130827.nc' \
       G212 python_gridded_ascii_python.nc -config Point2GridConfig_edr \
       -field 'name="200"; level="*"; valid_time="20130827_205959";' -method MAX -v 1
 
@@ -1191,10 +1191,10 @@ The script can be found at:
 
 .. code-block:: none
 
-    MET_BASE/utility/print_pointnc2ascii.py
+    MET_BASE/python/utility/print_pointnc2ascii.py
 
 For how to use the script, issue the command:
 
 .. code-block:: none
 
-    python3 MET_BASE/utility/print_pointnc2ascii.py -h
+    python3 MET_BASE/python/utility/print_pointnc2ascii.py -h
 
diff --git a/docs/Users_Guide/stat-analysis.rst b/docs/Users_Guide/stat-analysis.rst
index 1c1f1db4c0..8488dd39df 100644
--- a/docs/Users_Guide/stat-analysis.rst
+++ b/docs/Users_Guide/stat-analysis.rst
@@ -9,7 +9,7 @@ Introduction
 
 The Stat-Analysis tool ties together results from the Point-Stat, Grid-Stat, Ensemble-Stat, Wavelet-Stat, and TC-Gen tools by providing summary statistical information and a way to filter their STAT output files. It processes the STAT output created by the other MET tools in a variety of ways which are described in this section.
 
-MET version 9.0 adds support for the passing matched pair data (MPR) into Stat-Analysis using a Python script with the "-lookin python ..." option. An example of running Stat-Analysis with Python embedding is shown in :numref:`stat_analysis-usage`.
+MET version 9.0 adds support for passing matched pair (MPR) data into Stat-Analysis using a Python script with the "-lookin python ..." option. An example of running Stat-Analysis with Python embedding can be found in :numref:`Appendix F, Section %s `.
 
 Scientific and statistical aspects
 ==================================
 
@@ -287,7 +287,7 @@ In the usage statement for the Stat-Analysis tool, some additional terminology i
 
 Required arguments for stat_analysis
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-1. The **-lookin path** specifies the name of a directory to be searched recursively for STAT files (ending in ".stat") or any explicit file name with any suffix (such as "_ctc.txt") to be read. This option may be used multiple times to specify multiple directories and/or files to be read. If "-lookin python" is used, it must be followed by a Python embedding script and any command line arguments it takes. Python embedding can be used to pass matched pair (MPR) lines as input to Stat-Analysis.
+1. The **-lookin path** specifies the name of a directory to be searched recursively for STAT files (ending in ".stat") or any explicit file name with any suffix (such as "_ctc.txt") to be read. This option may be used multiple times to specify multiple directories and/or files to be read. If "-lookin python" is used, it must be followed by a Python embedding script and any command line arguments it takes. Python embedding can be used to pass **only** matched pair (MPR) lines as input to Stat-Analysis.
 
 2. Either a configuration file must be specified with the **-config** option, or a **JOB COMMAND LINE** must be denoted. The **JOB COMMAND LINE** is described in :numref:`stat_analysis-configuration-file`
 
@@ -313,22 +313,6 @@ An example of the stat_analysis calling sequence is shown below.
 
 In this example, the Stat-Analysis tool will search for valid STAT lines located in the *../out/point_stat* directory that meet the options specified in the configuration file, *config/STATAnalysisConfig*.
 
-.. _StA-pyembed:
-
-Python Embedding for Matched Pairs
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The example below uses Python embedding.
-
-.. code-block:: none
-
-   stat_analysis \
-      -lookin python MET_BASE/python/read_ascii_mpr.py point_stat_mpr.txt \
-      -job aggregate_stat -line_type MPR -out_line_type CNT \
-      -by FCST_VAR,FCST_LEV
-
-In this example, rather than passing the MPR output lines from Point-Stat directly into Stat-Analysis (which is the typical approach), the read_ascii_mpr.py Python embedding script reads that file and passes the data to Stat-Analysis.
The aggregate_stat job is defined on the command line and CNT statistics are derived from the MPR input data. Separate CNT statistics are computed for each unique combination of FCST_VAR and FCST_LEV present in the input. Please refer to :numref:`Appendix F, Section %s ` for more details about Python embedding in MET. - .. _stat_analysis-configuration-file: stat_analysis configuration file diff --git a/internal/test_unit/xml/unit_gen_vx_mask.xml b/internal/test_unit/xml/unit_gen_vx_mask.xml index 342721af33..b83bb9a033 100644 --- a/internal/test_unit/xml/unit_gen_vx_mask.xml +++ b/internal/test_unit/xml/unit_gen_vx_mask.xml @@ -500,8 +500,8 @@ PYTHON_NUMPY \ &OUTPUT_DIR;/gen_vx_mask/PYTHON_FCST_or_OBS_mask.nc \ -type data \ - -input_field 'name="&MET_BASE;/python/read_ascii_numpy.py &MET_DATA;/python/fcst.txt FCST";' \ - -mask_field 'name="&MET_BASE;/python/read_ascii_numpy.py &MET_DATA;/python/obs.txt OBS";' \ + -input_field 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &MET_DATA;/python/fcst.txt FCST";' \ + -mask_field 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &MET_DATA;/python/obs.txt OBS";' \ -thresh gt0 -union -v 3 diff --git a/internal/test_unit/xml/unit_python.xml b/internal/test_unit/xml/unit_python.xml index 5a519d9212..051f709a62 100644 --- a/internal/test_unit/xml/unit_python.xml +++ b/internal/test_unit/xml/unit_python.xml @@ -31,7 +31,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_name.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Grid Name: 'G212'" \ -v 1 @@ -53,7 +53,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_string.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Grid String: 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N'" \ -v 1 @@ -74,7 +74,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_data_file.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Gridded Data File: 'wrfprs_ruc13_12.tm00_G212'" \ -v 1 @@ -90,7 +90,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Python enabled numpy plot_data_plane" \ -v 1 @@ -105,7 +105,7 @@ \ PYTHON_XARRAY \ &OUTPUT_DIR;/python/letter_xarray.ps \ - 'name = "&MET_BASE;/python/read_ascii_xarray.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_xarray.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Python enabled xarray plot_data_plane" \ -v 1 @@ -120,7 +120,7 @@ \ &DATA_DIR_PYTHON;/letter.txt \ &OUTPUT_DIR;/python/letter_file_type.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG LETTER"; file_type=PYTHON_NUMPY;' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG LETTER"; file_type=PYTHON_NUMPY;' \ -plot_range 0.0 255.0 \ -title "Python enabled plot_data_plane 
using file_type option" \ -v 1 @@ -133,7 +133,7 @@ &MET_BIN;/mode - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ &OUTPUT_DIR;/pcp_combine/arw-tom-gep0_2012040912_F030_APCP06.nc \ @@ -152,8 +152,8 @@ &MET_BIN;/mode - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -172,7 +172,7 @@ &MET_BIN;/grid_stat - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ &DATA_DIR_MODEL;/grib1/nam_st4/nam_2012040900_F012_gSt4.grib \ @@ -189,8 +189,8 @@ &MET_BIN;/grid_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -206,8 +206,8 @@ &MET_BIN;/point_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -224,8 +224,8 @@ &MET_BIN;/wavelet_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -244,7 +244,7 @@ &MET_BIN;/wavelet_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST \ PYTHON_NUMPY \ @@ -266,7 +266,7 @@ PYTHON_NUMPY \ G130 \ &OUTPUT_DIR;/python/regrid_data_plane.nc \ - -field 'name="&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ + -field 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ -v 1 @@ -279,7 +279,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/shift_data_plane.nc \ - 'name="&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ + 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ -from 30 -110 -to 35 -115 \ -v 1 @@ -293,7 +293,7 @@ \ &DATA_DIR_PYTHON;/fcst.txt \ &OUTPUT_DIR;/python/shift_data_plane_input_arg.nc \ - 'name="&MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; file_type=PYTHON_NUMPY;' \ + 'name="&MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; file_type=PYTHON_NUMPY;' \ -from 30 -110 -to 35 -115 \ -v 1 @@ -305,8 +305,8 @@ &MET_BIN;/series_analysis - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py 
MET_PYTHON_INPUT_ARG OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS \ -fcst &DATA_DIR_PYTHON;/fcst.txt &DATA_DIR_PYTHON;/fcst.txt \ @@ -324,8 +324,8 @@ &MET_BIN;/mtd - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS OUTPUT_PREFIX PYTHON \ @@ -348,8 +348,8 @@ &MET_BIN;/ensemble_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS OUTPUT_PREFIX PYTHON \ @@ -369,7 +369,7 @@ &MET_BIN;/ascii2nc \ - "&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ + "&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ &OUTPUT_DIR;/python/ascii2nc_python.nc \ -format python @@ -382,7 +382,7 @@ &MET_BIN;/ascii2nc \ - "&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs_varname.txt" \ + "&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs_varname.txt" \ &OUTPUT_DIR;/python/ascii2nc_python_varname.nc \ -format python @@ -395,7 +395,7 @@ &MET_BIN;/stat_analysis \ - -lookin python &MET_BASE;/python/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ + -lookin python &MET_BASE;/python/examples/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ -job aggregate_stat -line_type MPR -out_line_type sl1l2 -by FCST_VAR \ -out_stat &OUTPUT_DIR;/python/stat_analysis_python_AGGR_MPR_to_SL1L2.stat @@ -415,7 +415,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_name_user_python.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Grid Name: 'G212'" \ -v 1 @@ -433,7 +433,7 @@ MET_PYTHON_EXE &MET_PYTHON_EXE; \ - "&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ + "&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ &OUTPUT_DIR;/python/ascii2nc_user_python.nc \ -format python @@ -453,7 +453,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_user_python.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Python enabled plot_data_plane" \ -v 1 @@ -471,7 +471,7 @@ MET_PYTHON_EXE &MET_PYTHON_EXE; \ - -lookin python &MET_BASE;/python/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ + -lookin python &MET_BASE;/python/examples/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ -job aggregate_stat -line_type MPR -out_line_type sl1l2 -by FCST_VAR \ -out_stat &OUTPUT_DIR;/python/stat_analysis_user_python_AGGR_MPR_to_SL1L2.stat @@ -483,7 +483,7 
@@ &MET_BIN;/point2grid \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ G212 \ &OUTPUT_DIR;/python/pb2nc_TMP.nc \ -field 'name="TMP"; level="*"; valid_time="20120409_120000"; censor_thresh=[ <0 ]; censor_val=[0];' \ @@ -502,7 +502,7 @@ MET_PYTHON_EXE &MET_PYTHON_EXE; \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ G212 \ &OUTPUT_DIR;/python/pb2nc_TMP_user_python.nc \ -field 'name="TMP"; level="*"; valid_time="20120409_120000"; censor_thresh=[ <0 ]; censor_val=[0];' \ @@ -520,7 +520,7 @@ TO_GRID NONE \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/precip24_2010010112.ascii' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/precip24_2010010112.ascii' \ &OUTPUT_DIR;/python/precip24_2010010112.ps \ -config &CONFIG_DIR;/PlotPointObsConfig \ -plot_grid &DATA_DIR_MODEL;/grib2/nam/nam_2012040900_F012.grib2 \ @@ -538,9 +538,9 @@ TO_GRID NONE \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ &OUTPUT_DIR;/python/nam_and_ndas.20120409.t12z.prepbufr_CONFIG.ps \ - -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/trmm_2012040912_3hr.nc' \ + -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/trmm_2012040912_3hr.nc' \ -plot_grid &DATA_DIR_MODEL;/grib2/nam/nam_2012040900_F012.grib2 \ -config &CONFIG_DIR;/PlotPointObsConfig \ -title "NAM 2012040900 F12 vs NDAS 500mb RH and TRMM 3h > 0" \ @@ -570,7 +570,7 @@ &OUTPUT_DIR;/python/ensemble_stat/input_file_list \ &CONFIG_DIR;/EnsembleStatConfig \ -grid_obs &DATA_DIR_OBS;/laps/laps_2012041012_F000.grib \ - -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/gauge_2012041012_24hr.nc' \ + -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/gauge_2012041012_24hr.nc' \ -outdir &OUTPUT_DIR;/python/ensemble_stat -v 1 @@ -595,7 +595,7 @@ \ &DATA_DIR_MODEL;/grib1/nam/nam_2012040900_F012.grib \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/gdas1.20120409.t12z.prepbufr.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/gdas1.20120409.t12z.prepbufr.nc' \ &CONFIG_DIR;/PointStatConfig_WINDS \ -outdir &OUTPUT_DIR;/python -v 1 @@ -609,7 +609,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/wrfout_d01_2008-08-08_12_00_00_PLEV_ZONAL_MEAN.ps \ - 'name="&MET_BASE;/python/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lat";' \ + 'name="&MET_BASE;/python/examples/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lat";' \ -title "WRF Zonal Mean" \ -v 1 @@ -622,7 +622,7 @@ &MET_BIN;/pcp_combine \ -add PYTHON_NUMPY \ - 'name="&MET_BASE;/python/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lon";' \ + 'name="&MET_BASE;/python/examples/derive_WRF_semilatlon.py 
&DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lon";' \ &OUTPUT_DIR;/python/wrfout_d01_2008-08-08_12_00_00_PLEV_MERIDIONAL_MEAN.nc \ -name "TT_MERIDIONAL_MEAN" -v 1 diff --git a/scripts/python/Makefile.am b/scripts/python/Makefile.am index 689708e4c3..c3b7b20042 100644 --- a/scripts/python/Makefile.am +++ b/scripts/python/Makefile.am @@ -18,6 +18,11 @@ # SUBDIRS = include +SUBDIRS = \ + examples \ + met \ + pyembed \ + utility ## Example of how to Install outside of $(pkgdatadir) ## scriptsrootdir = $(prefix)/share/scripts @@ -25,17 +30,10 @@ pythonscriptsdir = $(pkgdatadir)/python -pythonscripts_DATA = \ - met_point_obs.py \ - met_point_obs_nc.py \ - read_ascii_numpy.py \ - read_ascii_numpy_grid.py \ - read_ascii_xarray.py \ - read_ascii_point.py \ - read_ascii_mpr.py \ - read_met_point_obs.py \ - derive_WRF_semilatlon.py - -EXTRA_DIST = ${pythonscripts_DATA} +#EXTRA_DIST = ${top_DATA} \ +# sample_fcst \ +# sample_obs \ +# python \ +# copyright_notice.txt MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/Makefile.in b/scripts/python/Makefile.in index 6d85ed81f9..5ff5daed23 100644 --- a/scripts/python/Makefile.in +++ b/scripts/python/Makefile.in @@ -15,7 +15,6 @@ @SET_MAKE@ # SUBDIRS = include - VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ @@ -114,43 +113,74 @@ am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = +RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ + ctags-recursive dvi-recursive html-recursive info-recursive \ + install-data-recursive install-dvi-recursive \ + install-exec-recursive install-html-recursive \ + install-info-recursive install-pdf-recursive \ + install-ps-recursive install-recursive installcheck-recursive \ + installdirs-recursive pdf-recursive ps-recursive \ + tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac -am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; -am__vpath_adj = case $$p in \ - $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ - *) f=$$p;; \ - esac; -am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; -am__install_max = 40 -am__nobase_strip_setup = \ - srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` -am__nobase_strip = \ - for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" -am__nobase_list = $(am__nobase_strip_setup); \ - for p in $$list; do echo "$$p $$p"; done | \ - sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ - $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ - if (++n[$$2] == $(am__install_max)) \ - { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ - END { for (dir in files) print dir, files[dir] }' -am__base_list = \ - sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ - sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' -am__uninstall_files_from_dir = { \ - test -z "$$files" \ - || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ - || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ - $(am__cd) "$$dir" && rm -f $$files; }; \ - } -am__installdirs = "$(DESTDIR)$(pythonscriptsdir)" -DATA = $(pythonscripts_DATA) +RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ + distclean-recursive maintainer-clean-recursive +am__recursive_targets = \ + $(RECURSIVE_TARGETS) \ + $(RECURSIVE_CLEAN_TARGETS) \ + $(am__extra_recursive_targets) +AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ + distdir distdir-am am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. +am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +DIST_SUBDIRS = $(SUBDIRS) am__DIST_COMMON = $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +am__relativize = \ + dir0=`pwd`; \ + sed_first='s,^\([^/]*\)/.*$$,\1,'; \ + sed_rest='s,^[^/]*/*,,'; \ + sed_last='s,^.*/\([^/]*\)$$,\1,'; \ + sed_butlast='s,/*[^/]*$$,,'; \ + while test -n "$$dir1"; do \ + first=`echo "$$dir1" | sed -e "$$sed_first"`; \ + if test "$$first" != "."; then \ + if test "$$first" = ".."; then \ + dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ + dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ + else \ + first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ + if test "$$first2" = "$$first"; then \ + dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ + else \ + dir2="../$$dir2"; \ + fi; \ + dir0="$$dir0"/"$$first"; \ + fi; \ + fi; \ + dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ + done; \ + reldir="$$dir2" ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ @@ -296,21 +326,21 @@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ +SUBDIRS = \ + examples \ + met \ + pyembed \ + utility + pythonscriptsdir = $(pkgdatadir)/python -pythonscripts_DATA = \ - met_point_obs.py \ - met_point_obs_nc.py \ - read_ascii_numpy.py \ - read_ascii_numpy_grid.py \ - read_ascii_xarray.py \ - read_ascii_point.py \ - read_ascii_mpr.py \ - read_met_point_obs.py \ - derive_WRF_semilatlon.py - -EXTRA_DIST = ${pythonscripts_DATA} + +#EXTRA_DIST = ${top_DATA} \ +# sample_fcst \ +# sample_obs \ +# python \ +# copyright_notice.txt MAINTAINERCLEANFILES = Makefile.in -all: all-am +all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @@ -342,33 +372,105 @@ $(top_srcdir)/configure: $(am__configure_deps) $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): -install-pythonscriptsDATA: $(pythonscripts_DATA) - @$(NORMAL_INSTALL) - @list='$(pythonscripts_DATA)'; test -n "$(pythonscriptsdir)" || list=; \ - if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(pythonscriptsdir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(pythonscriptsdir)" || exit 1; \ - fi; \ - for p in $$list; do \ - 
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - echo "$$d$$p"; \ - done | $(am__base_list) | \ - while read files; do \ - echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonscriptsdir)'"; \ - $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonscriptsdir)" || exit $$?; \ - done - -uninstall-pythonscriptsDATA: - @$(NORMAL_UNINSTALL) - @list='$(pythonscripts_DATA)'; test -n "$(pythonscriptsdir)" || list=; \ - files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ - dir='$(DESTDIR)$(pythonscriptsdir)'; $(am__uninstall_files_from_dir) -tags TAGS: -ctags CTAGS: - -cscope cscopelist: +# This directory's subdirectories are mostly independent; you can cd +# into them and run 'make' without going through this Makefile. +# To change the values of 'make' variables: instead of editing Makefiles, +# (1) if the variable is set in 'config.status', edit 'config.status' +# (which will cause the Makefiles to be regenerated when you run 'make'); +# (2) otherwise, pass the desired values on the 'make' command line. +$(am__recursive_targets): + @fail=; \ + if $(am__make_keepgoing); then \ + failcom='fail=yes'; \ + else \ + failcom='exit 1'; \ + fi; \ + dot_seen=no; \ + target=`echo $@ | sed s/-recursive//`; \ + case "$@" in \ + distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ + *) list='$(SUBDIRS)' ;; \ + esac; \ + for subdir in $$list; do \ + echo "Making $$target in $$subdir"; \ + if test "$$subdir" = "."; then \ + dot_seen=yes; \ + local_target="$$target-am"; \ + else \ + local_target="$$target"; \ + fi; \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + || eval $$failcom; \ + done; \ + if test "$$dot_seen" = "no"; then \ + $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ + fi; test -z "$$fail" + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-recursive +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ + include_option=--etags-include; \ + empty_fix=.; \ + else \ + include_option=--include; \ + empty_fix=; \ + fi; \ + list='$(SUBDIRS)'; for subdir in $$list; do \ + if test "$$subdir" = .; then :; else \ + test ! 
-f $$subdir/TAGS || \ + set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ + fi; \ + done; \ + $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-recursive + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-recursive + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) distdir-am @@ -403,22 +505,45 @@ distdir-am: $(DISTFILES) || exit 1; \ fi; \ done -check-am: all-am -check: check-am -all-am: Makefile $(DATA) -installdirs: - for dir in "$(DESTDIR)$(pythonscriptsdir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ + if test "$$subdir" = .; then :; else \ + $(am__make_dryrun) \ + || test -d "$(distdir)/$$subdir" \ + || $(MKDIR_P) "$(distdir)/$$subdir" \ + || exit 1; \ + dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ + $(am__relativize); \ + new_distdir=$$reldir; \ + dir1=$$subdir; dir2="$(top_distdir)"; \ + $(am__relativize); \ + new_top_distdir=$$reldir; \ + echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ + echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ + ($(am__cd) $$subdir && \ + $(MAKE) $(AM_MAKEFLAGS) \ + top_distdir="$$new_top_distdir" \ + distdir="$$new_distdir" \ + am__remove_distdir=: \ + am__skip_length_check=: \ + am__skip_mode_fix=: \ + distdir) \ + || exit 1; \ + fi; \ done -install: install-am -install-exec: install-exec-am -install-data: install-data-am -uninstall: uninstall-am +check-am: all-am +check: check-recursive +all-am: Makefile +installdirs: installdirs-recursive +installdirs-am: +install: install-recursive +install-exec: install-exec-recursive +install-data: install-data-recursive +uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am -installcheck: installcheck-am +installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ @@ -441,85 +566,85 @@ maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
-test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) -clean: clean-am +clean: clean-recursive clean-am: clean-generic mostlyclean-am -distclean: distclean-am +distclean: distclean-recursive -rm -f Makefile -distclean-am: clean-am distclean-generic +distclean-am: clean-am distclean-generic distclean-tags -dvi: dvi-am +dvi: dvi-recursive dvi-am: -html: html-am +html: html-recursive html-am: -info: info-am +info: info-recursive info-am: -install-data-am: install-pythonscriptsDATA +install-data-am: -install-dvi: install-dvi-am +install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: -install-html: install-html-am +install-html: install-html-recursive install-html-am: -install-info: install-info-am +install-info: install-info-recursive install-info-am: install-man: -install-pdf: install-pdf-am +install-pdf: install-pdf-recursive install-pdf-am: -install-ps: install-ps-am +install-ps: install-ps-recursive install-ps-am: installcheck-am: -maintainer-clean: maintainer-clean-am +maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic -mostlyclean: mostlyclean-am +mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic -pdf: pdf-am +pdf: pdf-recursive pdf-am: -ps: ps-am +ps: ps-recursive ps-am: -uninstall-am: uninstall-pythonscriptsDATA +uninstall-am: -.MAKE: install-am install-strip +.MAKE: $(am__recursive_targets) install-am install-strip -.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ - ctags-am distclean distclean-generic distdir dvi dvi-am html \ - html-am info info-am install install-am install-data \ +.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ + check-am clean clean-generic cscopelist-am ctags ctags-am \ + distclean distclean-generic distclean-tags distdir dvi dvi-am \ + html html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ - install-ps install-ps-am install-pythonscriptsDATA \ - install-strip installcheck installcheck-am installdirs \ - maintainer-clean maintainer-clean-generic mostlyclean \ - mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ - uninstall-am uninstall-pythonscriptsDATA + install-ps install-ps-am install-strip installcheck \ + installcheck-am installdirs installdirs-am maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-generic pdf \ + pdf-am ps ps-am tags tags-am uninstall uninstall-am .PRECIOUS: Makefile diff --git a/scripts/python/examples/Makefile.am b/scripts/python/examples/Makefile.am new file mode 100644 index 0000000000..e0461a3564 --- /dev/null +++ b/scripts/python/examples/Makefile.am @@ -0,0 +1,39 @@ +## Makefile.am -- Process this file with automake to produce Makefile.in +## Copyright (C) 2000, 2006 Gary V. Vaughan +## +## This program is free software; you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation; either version 2, or (at your option) +## any later version. +## +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. 
+## +## You should have received a copy of the GNU General Public License +## along with this program; if not, write to the Free Software +## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, +## MA 02110-1301, USA. + +# SUBDIRS = include + + +## Example of how to Install outside of $(pkgdatadir) +## scriptsrootdir = $(prefix)/share/scripts +## pythonscriptsdir = ${scriptsrootdir}/python + +pythonexamplesdir = $(pkgdatadir)/python/examples + +pythonexamples_DATA = \ + derive_WRF_semilatlon.py \ + read_ascii_mpr.py \ + read_ascii_numpy_grid.py \ + read_ascii_numpy.py \ + read_ascii_point.py \ + read_ascii_xarray.py \ + read_met_point_obs.py + +EXTRA_DIST = ${pythonexamples_DATA} + +MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/examples/Makefile.in b/scripts/python/examples/Makefile.in new file mode 100644 index 0000000000..ad4832e5a0 --- /dev/null +++ b/scripts/python/examples/Makefile.in @@ -0,0 +1,527 @@ +# Makefile.in generated by automake 1.16.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2018 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +# SUBDIRS = include + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) 
;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +subdir = scripts/python/examples +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed 
'$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(pythonexamplesdir)" +DATA = $(pythonexamples_DATA) +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +am__DIST_COMMON = $(srcdir)/Makefile.in +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUFRLIB_NAME = @BUFRLIB_NAME@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FC_LIBS = @FC_LIBS@ +FFLAGS = @FFLAGS@ +FLIBS = @FLIBS@ +GREP = @GREP@ +GRIB2CLIB_NAME = @GRIB2CLIB_NAME@ +GRIB2_LIBS = @GRIB2_LIBS@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LEX = @LEX@ +LEXLIB = @LEXLIB@ +LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MET_BUFR = @MET_BUFR@ +MET_BUFRLIB = @MET_BUFRLIB@ +MET_CAIRO = @MET_CAIRO@ +MET_CAIROINC = @MET_CAIROINC@ +MET_CAIROLIB = @MET_CAIROLIB@ +MET_FREETYPE = @MET_FREETYPE@ +MET_FREETYPEINC = @MET_FREETYPEINC@ +MET_FREETYPELIB = @MET_FREETYPELIB@ +MET_GRIB2C = @MET_GRIB2C@ +MET_GRIB2CINC = @MET_GRIB2CINC@ +MET_GRIB2CLIB = @MET_GRIB2CLIB@ +MET_GSL = @MET_GSL@ +MET_GSLINC = @MET_GSLINC@ +MET_GSLLIB = @MET_GSLLIB@ +MET_HDF = @MET_HDF@ +MET_HDF5 = @MET_HDF5@ +MET_HDF5INC = @MET_HDF5INC@ +MET_HDF5LIB = @MET_HDF5LIB@ +MET_HDFEOS = @MET_HDFEOS@ +MET_HDFEOSINC = @MET_HDFEOSINC@ +MET_HDFEOSLIB = @MET_HDFEOSLIB@ +MET_HDFINC = @MET_HDFINC@ +MET_HDFLIB = @MET_HDFLIB@ +MET_NETCDF = @MET_NETCDF@ +MET_NETCDFINC = @MET_NETCDFINC@ +MET_NETCDFLIB = @MET_NETCDFLIB@ +MET_PYTHON_BIN_EXE = @MET_PYTHON_BIN_EXE@ +MET_PYTHON_CC = @MET_PYTHON_CC@ +MET_PYTHON_LD = @MET_PYTHON_LD@ +MKDIR_P = @MKDIR_P@ +OBJEXT = @OBJEXT@ +OPENMP_CFLAGS = @OPENMP_CFLAGS@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PYTHON_LIBS = @PYTHON_LIBS@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +YACC = @YACC@ +YFLAGS = @YFLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = 
@exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +pythonexamplesdir = $(pkgdatadir)/python/examples +pythonexamples_DATA = \ + derive_WRF_semilatlon.py \ + read_ascii_mpr.py \ + read_ascii_numpy_grid.py \ + read_ascii_numpy.py \ + read_ascii_point.py \ + read_ascii_xarray.py \ + read_met_point_obs.py + +EXTRA_DIST = ${pythonexamples_DATA} +MAINTAINERCLEANFILES = Makefile.in +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/examples/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign scripts/python/examples/Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +install-pythonexamplesDATA: $(pythonexamples_DATA) + @$(NORMAL_INSTALL) + @list='$(pythonexamples_DATA)'; test -n "$(pythonexamplesdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pythonexamplesdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pythonexamplesdir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonexamplesdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonexamplesdir)" || exit $$?; \ + done + +uninstall-pythonexamplesDATA: + @$(NORMAL_UNINSTALL) + @list='$(pythonexamples_DATA)'; test -n "$(pythonexamplesdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(pythonexamplesdir)'; $(am__uninstall_files_from_dir) +tags TAGS: + +ctags CTAGS: + +cscope cscopelist: + + +distdir: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) distdir-am + +distdir-am: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + 
dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(DATA) +installdirs: + for dir in "$(DESTDIR)$(pythonexamplesdir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." 
+ -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) +clean: clean-am + +clean-am: clean-generic mostlyclean-am + +distclean: distclean-am + -rm -f Makefile +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: install-pythonexamplesDATA + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-pythonexamplesDATA + +.MAKE: install-am install-strip + +.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ + ctags-am distclean distclean-generic distdir dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-pythonexamplesDATA \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ + uninstall-am uninstall-pythonexamplesDATA + +.PRECIOUS: Makefile + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/scripts/python/derive_WRF_semilatlon.py b/scripts/python/examples/derive_WRF_semilatlon.py similarity index 100% rename from scripts/python/derive_WRF_semilatlon.py rename to scripts/python/examples/derive_WRF_semilatlon.py diff --git a/scripts/python/examples/read_ascii_mpr.py b/scripts/python/examples/read_ascii_mpr.py new file mode 100644 index 0000000000..d166893c98 --- /dev/null +++ b/scripts/python/examples/read_ascii_mpr.py @@ -0,0 +1,34 @@ +import os +import sys +from met.mprbase import mpr_data + + +######################################################################## + +print("Python Script:\t" + repr(sys.argv[0])) + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) != 2: + print("ERROR: read_ascii_mpr.py -> Must specify exactly one input file.") + sys.exit(1) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1]) +try: + print("Input File:\t" + repr(input_file)) + + # Read MPR lines by using the Pandas Python package, + # skipping the header row and first column. + # Input should be a 36 column text data matching the MPR line-type + # output from MET tools. 
+ mpr_data = mpr_data.read_mpr(input_file, col_start=1, col_last=36, skiprows=1) + print("Data Length:\t" + repr(len(mpr_data))) + print("Data Type:\t" + repr(type(mpr_data))) +except NameError: + print("Can't find the input file") + +######################################################################## diff --git a/scripts/python/examples/read_ascii_numpy.py b/scripts/python/examples/read_ascii_numpy.py new file mode 100644 index 0000000000..a15fe17031 --- /dev/null +++ b/scripts/python/examples/read_ascii_numpy.py @@ -0,0 +1,85 @@ +import os +import sys +from met.dataplane import dataplane + +########################################### + +print("Python Script:\t" + repr(sys.argv[0])) + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) != 3: + print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.") + sys.exit(1) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1]) +data_name = sys.argv[2] + +try: + print("Input File:\t" + repr(input_file)) + print("Data Name:\t" + repr(data_name)) + # read_2d_text_input() reads n by m text data and returns a 2D numpy array + met_data = dataplane.read_2d_text_input(input_file) + print("Data Shape:\t" + repr(met_data.shape)) + print("Data Type:\t" + repr(met_data.dtype)) +except NameError: + met_data = None + print("Can't find the input file") + +# attrs is a dictionary which contains attributes describing the dataplane. +# attrs should have 9 items, each of data type string: +# 'name': data name +# 'long_name': descriptive name +# 'valid': valid time (format = 'yyyymmdd_hhmmss') +# 'init': init time (format = 'yyyymmdd_hhmmss') +# 'lead': lead time (format = 'hhmmss') +# 'accum': accumulation time (format = 'hhmmss') +# 'level': vertical level +# 'units': units of the data +# 'grid': contains the grid information +# - a grid name (G212) +# - a gridded data file name +# - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" +# - a dictionary for the grid information + +valid_time = '20050807_120000' +init_time = '20050807_000000' +lead_time = '120000' +accum_time = '120000' +v_level = 'Surface' +units = 'None' + +grid_lambert_conformal = { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + + 'name': 'FooGrid', + + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + + 'lat_pin': 12.19, + 'lon_pin': -135.459, + + 'x_pin': 0.0, + 'y_pin': 0.0, + + 'lon_orient': -95.0, + + 'd_km': 40.635, + 'r_km': 6371.2, + + 'nx': 185, + 'ny': 129, +} + +long_name = data_name + "_word" +attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time, + lead_time, accum_time, v_level, units, + grid_lambert_conformal, long_name) + +print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/examples/read_ascii_numpy_grid.py b/scripts/python/examples/read_ascii_numpy_grid.py new file mode 100644 index 0000000000..79e6829052 --- /dev/null +++ b/scripts/python/examples/read_ascii_numpy_grid.py @@ -0,0 +1,64 @@ +import os +import sys +from met.dataplane import dataplane + +########################################### + +print("Python Script:\t" + repr(sys.argv[0])) + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) != 3: + print("ERROR: read_ascii_numpy_grid.py -> Must specify exactly one input file and a name for the data.") + sys.exit(1) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1])
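A hedged aside on the 'grid' attribute documented above: the grid_lambert_conformal dictionary in read_ascii_numpy.py and the MET grid string quoted in its comments appear to carry the same fields, and read_ascii_numpy_grid.py below passes just such a string in via the PYTHON_GRID environment variable. A minimal sketch of the correspondence follows; the field ordering is an assumption inferred from the example string and the dictionary keys, not a definitive specification.

# Editor's sketch only, not part of the patch:
#   "lambert <nx> <ny> <lat_pin> <lon_pin> <lon_orient> <d_km> <r_km>
#            <scale_lat_1> <scale_lat_2> <hemisphere>"
grid_as_string = "lambert 185 129 12.19 -135.459 -95 40.635 6371.2 25 25 N"
# Either this string or the dictionary form may be passed to
# dataplane.set_dataplane_attrs() as the grid_info argument.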
+data_name = sys.argv[2] + +try: + # Print some output to verify that this script ran + print("Input File:\t" + repr(input_file)) + print("Data Name:\t" + repr(data_name)) + # read_2d_text_input() reads n by m text data and returns a 2D numpy array + met_data = dataplane.read_2d_text_input(input_file) + print("Data Shape:\t" + repr(met_data.shape)) + print("Data Type:\t" + repr(met_data.dtype)) +except NameError: + print("Can't find the input file") + +# attrs is a dictionary which contains attributes describing the dataplane. +# attrs should have 9 items, each of data type string: +# 'name': data name +# 'long_name': descriptive name +# 'valid': valid time (format = 'yyyymmdd_hhmmss') +# 'init': init time (format = 'yyyymmdd_hhmmss') +# 'lead': lead time (format = 'hhmmss') +# 'accum': accumulation time (format = 'hhmmss') +# 'level': vertical level +# 'units': units of the data +# 'grid': contains the grid information +# - a grid name (G212) +# - a gridded data file name +# - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" +# - a dictionary for the grid information + +valid_time = '20050807_120000' +init_time = '20050807_000000' +lead_time = '120000' +accum_time = '120000' +v_level = 'Surface' +units = 'None' + +## create the metadata dictionary from the environment variable +grid_info = os.path.expandvars(os.getenv('PYTHON_GRID')) + +long_name = data_name + "_word" +attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time, + lead_time, accum_time, v_level, units, + grid_info, long_name) + +print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/examples/read_ascii_point.py b/scripts/python/examples/read_ascii_point.py new file mode 100644 index 0000000000..88e9e31ce6 --- /dev/null +++ b/scripts/python/examples/read_ascii_point.py @@ -0,0 +1,51 @@ +import os +import sys + +from met.point import met_point_tools + +######################################################################## + +print("Python Script:\t" + repr(sys.argv[0])) + +## +## input file specified on the command line +## load the data into the numpy array +## + +arg_cnt = len(sys.argv) +if arg_cnt < 2: + print("ERROR: read_ascii_point.py -> Missing an input file.") + sys.exit(1) + +last_index = 2 +if last_index < arg_cnt: + print(" INFO: read_ascii_point.py -> Too many arguments, ignoring {o}.".format( + o=' '.join(sys.argv[last_index:]))) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1]) +try: + print("Input File:\t" + repr(input_file)) + + # Read and format the input 11-column observations: + # (1) string: Message_Type + # (2) string: Station_ID + # (3) string: Valid_Time(YYYYMMDD_HHMMSS) + # (4) numeric: Lat(Deg North) + # (5) numeric: Lon(Deg East) + # (6) numeric: Elevation(msl) + # (7) string: Var_Name(or GRIB_Code) + # (8) numeric: Level + # (9) numeric: Height(msl or agl) + # (10) string: QC_String + # (11) numeric: Observation_Value + + # Read 11-column text input data using the pandas package + point_data = met_point_tools.read_text_point_obs(input_file) + print(" point_data: Data Length:\t" + repr(len(point_data))) + print(" point_data: Data Type:\t" + repr(type(point_data))) +except FileNotFoundError: + print(f"The input file {input_file} does not exist") + sys.exit(1) + +######################################################################## diff --git a/scripts/python/examples/read_ascii_xarray.py b/scripts/python/examples/read_ascii_xarray.py new file mode 100644 index 0000000000..8998235ea1 --- /dev/null
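For read_ascii_point.py above, a small hedged illustration of the 11-column text format it consumes; the station identifier and values in the sample row are invented for demonstration only.

# typ     sid   vld              lat    lon      elv   var  lvl  hgt  qc  obs
# ADPSFC  KDEN  20050807_120000  39.87  -104.67  1656  TMP  NA   2    NA  292.5
#
# point_data = met_point_tools.read_text_point_obs("sample_obs.txt")  # placeholder name
# Each element of point_data is one [typ, sid, vld, lat, lon, elv, var,
# lvl, hgt, qc, obs] record; 'NA' entries are mapped to the fill value later.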
+++ b/scripts/python/examples/read_ascii_xarray.py @@ -0,0 +1,109 @@ +import os +import sys +import xarray as xr +from met.dataplane import dataplane + +########################################### + +print("Python Script:\t" + repr(sys.argv[0])) + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) != 3: + print("ERROR: read_ascii_xarray.py -> Must specify exactly one input file and a name for the data.") + sys.exit(1) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1]) +data_name = sys.argv[2] + +try: + print("Input File:\t" + repr(input_file)) + print("Data Name:\t" + repr(data_name)) + # read_2d_text_input() reads n by m text data and returns a 2D numpy array + met_data = dataplane.read_2d_text_input(input_file) + print("Data Shape:\t" + repr(met_data.shape)) + print("Data Type:\t" + repr(met_data.dtype)) +except NameError: + met_data = None + print("Can't read the input file") + +########################################### + + ## + ## create the metadata dictionary + ## + +# attrs is a dictionary which contains attributes describing the dataplane. +# attrs should have 9 items, each of data type string: +# 'name': data name +# 'long_name': descriptive name +# 'valid': valid time (format = 'yyyymmdd_hhmmss') +# 'init': init time (format = 'yyyymmdd_hhmmss') +# 'lead': lead time (format = 'hhmmss') +# 'accum': accumulation time (format = 'hhmmss') +# 'level': vertical level +# 'units': units of the data +# 'grid': contains the grid information +# - a grid name (G212) +# - a gridded data file name +# - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" +# - a dictionary for the grid information + +valid_time = '20050807_120000' +init_time = '20050807_000000' +lead_time = '120000' +accum_time = '120000' +v_level = 'Surface' +units = 'None' + +grid_lambert_conformal = { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + + 'name': 'FooGrid', + + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + + 'lat_pin': 12.19, + 'lon_pin': -135.459, + + 'x_pin': 0.0, + 'y_pin': 0.0, + + 'lon_orient': -95.0, + + 'd_km': 40.635, + 'r_km': 6371.2, + + 'nx': 185, + 'ny': 129, +} + +long_name = data_name + "_word" +attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time, + lead_time, accum_time, v_level, units, + grid_lambert_conformal, long_name) + +print("Attributes:\t" + repr(attrs)) + +# Create an xarray DataArray object +da = xr.DataArray(met_data) +ds = xr.Dataset({"fcst":da}) + +# Add the attributes to the dataarray object +ds.attrs = attrs + +# Delete the local variable attrs to mimic the real world, +# where a user will rely on da.attrs rather than construct it themselves +del attrs + +# Delete the met_data variable, and reset it to be the Xarray object +del met_data + +# Create met_data and specify attrs because XR doesn't persist them. +met_data = xr.DataArray(ds.fcst, attrs=ds.attrs) diff --git a/scripts/python/examples/read_met_point_obs.py b/scripts/python/examples/read_met_point_obs.py new file mode 100644 index 0000000000..e16ccf2d86 --- /dev/null +++ b/scripts/python/examples/read_met_point_obs.py @@ -0,0 +1,61 @@ +''' +Created on Nov 10, 2021 + +@author: hsoh + +This script reads the MET point observation NetCDF file like MET tools do.
+ +Usage: + + python3 read_met_point_obs.py + python3 read_met_point_obs.py <point_obs_input> + <point_obs_input>: 11 columns + 'typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs' + string columns: 'typ', 'sid', 'vld', 'var', 'qc' + numeric columns: 'lat', 'lon', 'elv', 'lvl', 'hgt', 'obs' + +''' + +import os +import sys +from datetime import datetime + +from met.point import met_point_tools +from pyembed.python_embedding import pyembed_tools + +ARG_PRINT_DATA = 'show_data' +DO_PRINT_DATA = ARG_PRINT_DATA == sys.argv[-1] + +start_time = datetime.now() + +point_obs_data = None +input_name = sys.argv[1] if len(sys.argv) > 1 else None +prompt = met_point_tools.get_prompt() +if len(sys.argv) == 1 or ARG_PRINT_DATA == input_name: + # This is an example of creating sample data + point_obs_data = met_point_tools.get_sample_point_obs() + point_obs_data.read_data([]) +elif met_point_tools.is_python_prefix(input_name): + # This is an example of calling a python script for ascii2nc + point_obs_data = pyembed_tools.call_python(sys.argv) +else: + # This is an example of reading MET's point observation NetCDF file + # from ascii2nc, madis2nc, and pb2nc + netcdf_filename = os.path.expandvars(input_name) + args = [ netcdf_filename ] + #args = { 'nc_name': netcdf_filename } + point_obs_data = met_point_tools.get_nc_point_obs() + point_obs_data.read_data(point_obs_data.get_nc_filename(args)) + +if point_obs_data is not None: + met_point_data = point_obs_data.get_point_data() + met_point_data['met_point_data'] = point_obs_data + print("met_point_data: ", met_point_data) + print(met_point_data) + + if DO_PRINT_DATA: + point_obs_data.dump() + +run_time = datetime.now() - start_time + +print('{p} Done python script {s} took {t}'.format(p=prompt, s=sys.argv[0], t=run_time)) diff --git a/scripts/python/met/Makefile.am b/scripts/python/met/Makefile.am new file mode 100644 index 0000000000..9e430722af --- /dev/null +++ b/scripts/python/met/Makefile.am @@ -0,0 +1,34 @@ +## Makefile.am -- Process this file with automake to produce Makefile.in +## Copyright (C) 2000, 2006 Gary V. Vaughan +## +## This program is free software; you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation; either version 2, or (at your option) +## any later version. +## +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. +## +## You should have received a copy of the GNU General Public License +## along with this program; if not, write to the Free Software +## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, +## MA 02110-1301, USA. + +# SUBDIRS = include + +## Example of how to install outside of $(pkgdatadir) +## scriptsrootdir = $(prefix)/share/scripts +## pythonscriptsdir = ${scriptsrootdir}/python + +pythonmetscriptsdir = $(pkgdatadir)/python/met + +pythonmetscripts_DATA = \ + dataplane.py \ + mprbase.py \ + point.py + +EXTRA_DIST = ${pythonmetscripts_DATA} + +MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/met/Makefile.in b/scripts/python/met/Makefile.in new file mode 100644 index 0000000000..488e85355e --- /dev/null +++ b/scripts/python/met/Makefile.in @@ -0,0 +1,523 @@ +# Makefile.in generated by automake 1.16.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2018 Free Software Foundation, Inc.
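A sketch of the three input modes that read_met_point_obs.py above handles, inferred from its branching logic; "obs.nc" and "my_reader.py" are placeholder names.

# python3 read_met_point_obs.py show_data    # dump the built-in sample point obs
# python3 read_met_point_obs.py obs.nc       # MET NetCDF from ascii2nc/madis2nc/pb2nc
# python3 read_met_point_obs.py "PYTHON_POINT_USER my_reader.py my_args"
#                                            # hand off to a user python reader via pyembed_tools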
+ +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +# SUBDIRS = include + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +subdir = scripts/python/met +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) 
f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(pythonmetscriptsdir)" +DATA = $(pythonmetscripts_DATA) +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +am__DIST_COMMON = $(srcdir)/Makefile.in +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUFRLIB_NAME = @BUFRLIB_NAME@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FC_LIBS = @FC_LIBS@ +FFLAGS = @FFLAGS@ +FLIBS = @FLIBS@ +GREP = @GREP@ +GRIB2CLIB_NAME = @GRIB2CLIB_NAME@ +GRIB2_LIBS = @GRIB2_LIBS@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LEX = @LEX@ +LEXLIB = @LEXLIB@ +LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MET_BUFR = @MET_BUFR@ +MET_BUFRLIB = @MET_BUFRLIB@ +MET_CAIRO = @MET_CAIRO@ +MET_CAIROINC = @MET_CAIROINC@ +MET_CAIROLIB = @MET_CAIROLIB@ +MET_FREETYPE = @MET_FREETYPE@ +MET_FREETYPEINC = @MET_FREETYPEINC@ +MET_FREETYPELIB = @MET_FREETYPELIB@ +MET_GRIB2C = @MET_GRIB2C@ +MET_GRIB2CINC = @MET_GRIB2CINC@ +MET_GRIB2CLIB = @MET_GRIB2CLIB@ +MET_GSL = @MET_GSL@ +MET_GSLINC = @MET_GSLINC@ +MET_GSLLIB = @MET_GSLLIB@ +MET_HDF = @MET_HDF@ +MET_HDF5 = @MET_HDF5@ +MET_HDF5INC = @MET_HDF5INC@ +MET_HDF5LIB = @MET_HDF5LIB@ +MET_HDFEOS = @MET_HDFEOS@ +MET_HDFEOSINC = @MET_HDFEOSINC@ +MET_HDFEOSLIB = @MET_HDFEOSLIB@ +MET_HDFINC = @MET_HDFINC@ +MET_HDFLIB = @MET_HDFLIB@ +MET_NETCDF = @MET_NETCDF@ +MET_NETCDFINC = @MET_NETCDFINC@ +MET_NETCDFLIB = @MET_NETCDFLIB@ +MET_PYTHON_BIN_EXE = @MET_PYTHON_BIN_EXE@ +MET_PYTHON_CC = @MET_PYTHON_CC@ +MET_PYTHON_LD = @MET_PYTHON_LD@ +MKDIR_P = @MKDIR_P@ +OBJEXT = @OBJEXT@ +OPENMP_CFLAGS = @OPENMP_CFLAGS@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PYTHON_LIBS = 
@PYTHON_LIBS@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +YACC = @YACC@ +YFLAGS = @YFLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +pythonmetscriptsdir = $(pkgdatadir)/python/met +pythonmetscripts_DATA = \ + dataplane.py \ + mprbase.py \ + point.py + +EXTRA_DIST = ${pythonmetscripts_DATA} +MAINTAINERCLEANFILES = Makefile.in +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/met/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign scripts/python/met/Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' 
in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +install-pythonmetscriptsDATA: $(pythonmetscripts_DATA) + @$(NORMAL_INSTALL) + @list='$(pythonmetscripts_DATA)'; test -n "$(pythonmetscriptsdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pythonmetscriptsdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pythonmetscriptsdir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonmetscriptsdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonmetscriptsdir)" || exit $$?; \ + done + +uninstall-pythonmetscriptsDATA: + @$(NORMAL_UNINSTALL) + @list='$(pythonmetscripts_DATA)'; test -n "$(pythonmetscriptsdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(pythonmetscriptsdir)'; $(am__uninstall_files_from_dir) +tags TAGS: + +ctags CTAGS: + +cscope cscopelist: + + +distdir: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) distdir-am + +distdir-am: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(DATA) +installdirs: + for dir in "$(DESTDIR)$(pythonmetscriptsdir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." + -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) +clean: clean-am + +clean-am: clean-generic mostlyclean-am + +distclean: distclean-am + -rm -f Makefile +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: install-pythonmetscriptsDATA + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-pythonmetscriptsDATA + +.MAKE: install-am install-strip + +.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ + ctags-am distclean distclean-generic distdir dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-pythonmetscriptsDATA \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ + uninstall-am uninstall-pythonmetscriptsDATA + +.PRECIOUS: Makefile + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. 
+.NOEXPORT: diff --git a/scripts/python/met/dataplane.py b/scripts/python/met/dataplane.py new file mode 100644 index 0000000000..a9aed943b6 --- /dev/null +++ b/scripts/python/met/dataplane.py @@ -0,0 +1,118 @@ +import os +import numpy as np +import netCDF4 as nc + +########################################### + +class dataplane(): + + ## + ## create the metadata dictionary + ## + + #@staticmethod + # Python dictionary items: + # 'name': data name + # 'long_name': descriptive name + # 'valid': valid time (format = 'yyyymmdd_hhmmss') + # 'init': init time (format = 'yyyymmdd_hhmmss') + # 'lead': lead time (format = 'hhmmss') + # 'accum': accumulation time (format = 'hhmmss') + # 'level': vertical level + # 'units': units of the data + # 'grid': contains the grid information + # - a grid name (G212) + # - a gridded data file name + # - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" + # - a dictionary for the grid information + def set_dataplane_attrs(data_name, valid_time, init_time, lead_time, + accum_time, v_level, units, grid_info, long_name=None): + hdr_attrs = { + + 'valid': valid_time, + 'init': init_time, + 'lead': lead_time, + 'accum': accum_time, + + 'name': data_name, + 'long_name': long_name if long_name is not None and long_name != "" else data_name + '_long', + 'level': v_level, + 'units': units, + + 'grid': grid_info + + } + return hdr_attrs + + #@staticmethod + def read_2d_text_input(input_file): + if os.path.exists(input_file): + met_data = np.loadtxt(input_file) + else: + met_data = None + return met_data + + #@staticmethod + def read_dataplane(netcdf_filename): + # read NetCDF file + ds = nc.Dataset(netcdf_filename, 'r') + met_data = ds['met_data'][:] + met_attrs = {} + + # grid is defined as a dictionary or string + grid = {} + for attr, attr_val in ds.__dict__.items(): + if 'grid.' in attr: + grid_attr = attr.split('.')[1] + grid[grid_attr] = attr_val + else: + met_attrs[attr] = attr_val + + if grid: + met_attrs['grid'] = grid + + met_attrs['name'] = met_attrs['name_str'] + del met_attrs['name_str'] + + met_info = {} + met_info['met_data'] = met_data + met_info['attrs'] = met_attrs + return met_info + + #@staticmethod + def write_dataplane(met_in, netcdf_filename): + met_info = {'met_data': met_in.met_data} + if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: + attrs = met_in.met_data.attrs + else: + attrs = met_in.attrs + met_info['attrs'] = attrs + + # determine fill value + try: + fill = met_in.met_data.get_fill_value() + except: + fill = -9999. + + # write NetCDF file + ds = nc.Dataset(netcdf_filename, 'w') + + # create dimensions and variable + nx, ny = met_in.met_data.shape + ds.createDimension('x', nx) + ds.createDimension('y', ny) + dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill) + dp[:] = met_in.met_data + + # append attributes + for attr, attr_val in met_info['attrs'].items(): + if attr == 'name': + setattr(ds, 'name_str', attr_val) + elif type(attr_val) == dict: + for key in attr_val: + setattr(ds, attr + '.'
+ key, attr_val[key]) + else: + setattr(ds, attr, attr_val) + + ds.close() + diff --git a/scripts/python/met/mprbase.py b/scripts/python/met/mprbase.py new file mode 100644 index 0000000000..57e5769015 --- /dev/null +++ b/scripts/python/met/mprbase.py @@ -0,0 +1,21 @@ +import pandas as pd + +######################################################################## + +class mpr_data(): + + # Read a text file with N columns and return the list of N-column data. + # Skip the first "col_start" columns if col_start is not 0. + def read_mpr(input_file, col_last, col_start = 0, header=None, + delim_whitespace=True, keep_default_na=False, + skiprows=1, dtype=str): + mpr_data = pd.read_csv(input_file, header=header, + delim_whitespace=delim_whitespace, + keep_default_na=keep_default_na, + skiprows=skiprows, + usecols=range(col_start,col_last+1), + dtype=dtype).values.tolist() + return mpr_data + + +######################################################################## diff --git a/scripts/python/met/point.py b/scripts/python/met/point.py new file mode 100644 index 0000000000..10c93f91aa --- /dev/null +++ b/scripts/python/met/point.py @@ -0,0 +1,983 @@ +''' +Created on Nov 10, 2021 + +@author: hsoh + +- This is the base class; a customized script should extend met_point_obs. +- The customized script (for example "custom_reader") must implement + "def read_data(self, args)", which fills the array variables at __init__(). +- The args can be 1) a single string argument, 2) a list of arguments, + or 3) a dictionary of arguments. +- Either the "point_data" or the "met_point_data" python object (variable) must be set: + + "point_data" is from 11 column text input + + "met_point_data" is an array of headers and observation data. + + "point_obs_data" is optional, for use with a custom python EXE. + It's a python instance which processes the point observation data +- The customized script is expected to include the following code: + + + Note: csv_point_obs is an example of met_point_data, not point_data + + + Example of "point_data": see met_point_tools.read_text_point_obs() + +def read_custom_data(data_filename): + # Implement here and return the array of 11 column data + +# prepare arguments for the customized script +data_filename = sys.argv[1] +point_data = read_custom_data(data_filename) + + + + Example of "met_point_data": see csv_point_obs + +from met.point import met_point_obs + +class custom_reader(met_point_obs): + + def read_data(self, data_filename): + # Implement here + +# prepare arguments for the customized script +data_filename = sys.argv[1] +point_obs_data = custom_reader() +point_obs_data.read_data(data_filename) +met_point_data = point_obs_data.get_point_data() + +''' + +import os +from abc import ABC, abstractmethod + +import numpy as np +import netCDF4 as nc +import pandas as pd + +COUNT_SHOW = 30 + +class base_met_point_obs(object): + ''' + classdocs + ''' + ERROR_P = " ==PYTHON_ERROR==" + INFO_P = " ==PYTHON_INFO==" + + python_prefix = 'PYTHON_POINT_USER' + + FILL_VALUE = -9999.
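A usage sketch for mpr_data.read_mpr() defined above, mirroring the call made in read_ascii_mpr.py; "point_stat_mpr.txt" is a placeholder file name.

# from met.mprbase import mpr_data
# # Skip the header row and the first column; keep columns 1..36.
# records = mpr_data.read_mpr("point_stat_mpr.txt", col_start=1, col_last=36,
#                             skiprows=1)
# # records is a list of rows, with every value read as a string (dtype=str).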
+ + def __init__(self, use_var_id=True): + ''' + Constructor + ''' + self.count_info = "" + self.input_name = None + self.ignore_input_file = False + self.use_var_id = use_var_id # True if variable index, False if GRIB code + self.error_msg = "" + self.has_error = False + + # Header + self.nhdr = 0 + self.npbhdr = 0 + self.nhdr_typ = 0 # type table + self.nhdr_sid = 0 # station_id table + self.nhdr_vld = 0 # valid time strings + self.hdr_typ = [] # (nhdr) integer + self.hdr_sid = [] # (nhdr) integer + self.hdr_vld = [] # (nhdr) integer + self.hdr_lat = [] # (nhdr) float + self.hdr_lon = [] # (nhdr) float + self.hdr_elv = [] # (nhdr) float + self.hdr_typ_table = [] # (nhdr_typ, mxstr2) string + self.hdr_sid_table = [] # (nhdr_sid, mxstr2) string + self.hdr_vld_table = [] # (nhdr_vld, mxstr) string + + #Observation data + self.nobs = 0 + self.nobs_qty = 0 + self.nobs_var = 0 + self.obs_qty = [] # (nobs_qty) integer, index of self.obs_qty_table + self.obs_hid = [] # (nobs) integer + self.obs_vid = [] # (nobs) integer, variable index from self.obs_var_table or GRIB code + self.obs_lvl = [] # (nobs) float + self.obs_hgt = [] # (nobs) float + self.obs_val = [] # (nobs) float + self.obs_qty_table = [] # (nobs_qty, mxstr) string + self.obs_var_table = [] # (nobs_var, mxstr2) string, required if self.use_var_id is True + self.obs_var_unit = [] # (nobs_var, mxstr2) string, optional if self.use_var_id is True + self.obs_var_desc = [] # (nobs_var, mxstr3) string, optional if self.use_var_id is True + + # Optional variables for PREPBUFR, not supported yet + self.hdr_prpt_typ = [] # optional + self.hdr_irpt_typ = [] # optional + self.hdr_inst_typ = [] # optional + + def add_error_msg(self, error_msg): + self.has_error = True + self.log_error_msg(error_msg) + if 0 == len(self.error_msg): + self.error_msg = error_msg + else: + self.error_msg = "{m1}\n{m2}".format(m1=self.error_msg, m2=error_msg) + + def add_error_msgs(self, error_msgs): + self.has_error = True + for error_msg in error_msgs: + self.add_error_msg(error_msg) + + def check_data_member_float(self, local_var, var_name): + if 0 == len(local_var): + self.add_error_msg("{v} is empty (float)".format(v=var_name)) + elif isinstance(local_var, list): + if isinstance(local_var[0], str) and not self.is_number(local_var[0]): + self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int or float only)".format( + n=var_name, v=local_var[0])) + elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], (int, float)): + self.add_error_msg("Not supported data type ({t}) for {v}[0] (int or float only)".format( + v=var_name, t=type(local_var[0]))) + elif not self.is_numpy_array(local_var): + self.add_error_msg("Not supported data type ({t}) for {v} (list and numpy.ndarray)".format( + v=var_name, t=type(local_var))) + + def check_data_member_int(self, local_var, var_name): + if 0 == len(local_var): + self.add_error_msg("{v} is empty (int)".format(v=var_name)) + elif isinstance(local_var, list): + if isinstance(local_var[0], str) and not self.is_number(local_var[0]): + self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int only)".format( + n=var_name, v=local_var[0])) + elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], int): + self.add_error_msg("Not supported data type ({t}) for {v}[0] (int only)".format( + v=var_name, t=type(local_var[0]))) + elif not self.is_numpy_array(local_var): + self.add_error_msg("Not supported data type ({t}) for {v} (list and
numpy.ndarray)".format( + v=var_name, t=type(local_var))) + + def check_data_member_string(self, local_var, var_name): + if 0 == len(local_var): + self.add_error_msg("{v} is empty (string)".format(v=var_name)) + elif not isinstance(local_var, (list)): + self.add_error_msg("Not supported data type ({t}) for {v} (list)".format( + v=var_name, t=type(local_var))) + + def check_point_data(self): + if not self.ignore_input_file and self.input_name is not None and not os.path.exists(self.input_name): + self.add_error_msg('The netcdf input {f} does not exist'.format(f=self.input_name)) + else: + self.check_data_member_int(self.hdr_typ,'hdr_typ') + self.check_data_member_int(self.hdr_sid,'hdr_sid') + self.check_data_member_int(self.hdr_vld,'hdr_vld') + self.check_data_member_float(self.hdr_lat,'hdr_lat') + self.check_data_member_float(self.hdr_lon,'hdr_lon') + self.check_data_member_float(self.hdr_elv,'hdr_elv') + self.check_data_member_string(self.hdr_typ_table,'hdr_typ_table') + self.check_data_member_string(self.hdr_sid_table,'hdr_sid_table') + self.check_data_member_string(self.hdr_vld_table,'hdr_vld_table') + + self.check_data_member_int(self.obs_qty,'obs_qty') + self.check_data_member_int(self.obs_hid,'obs_hid') + self.check_data_member_int(self.obs_vid,'obs_vid') + self.check_data_member_float(self.obs_lvl,'obs_lvl') + self.check_data_member_float(self.obs_hgt,'obs_hgt') + self.check_data_member_float(self.obs_val,'obs_val') + self.check_data_member_string(self.obs_qty_table,'obs_qty_table') + if self.use_var_id: + self.check_data_member_string(self.obs_var_table,'obs_var_table') + + def convert_to_numpy(self, value_list): + return np.array(value_list) + + def dump(self): + base_met_point_obs.print_point_data(self.get_point_data()) + + def get_count_string(self): + return f' nobs={self.nobs} nhdr={self.nhdr} ntyp={self.nhdr_typ} nsid={self.nhdr_sid} nvld={self.nhdr_vld} nqty={self.nobs_qty} nvar={self.nobs_var}' + + def get_point_data(self): + if self.nhdr <= 0: + self.nhdr = len(self.hdr_lat) + if self.nobs <= 0: + self.nobs = len(self.obs_val) + if self.nhdr_typ <= 0: + self.nhdr_typ = len(self.hdr_typ_table) + if self.nhdr_sid <= 0: + self.nhdr_sid = len(self.hdr_sid_table) + if self.nhdr_vld <= 0: + self.nhdr_vld = len(self.hdr_vld_table) + if self.npbhdr <= 0: + self.npbhdr = len(self.hdr_prpt_typ) + if self.nobs_qty <= 0: + self.nobs_qty = len(self.obs_qty_table) + if self.nobs_var <= 0: + self.nobs_var = len(self.obs_var_table) + self.check_point_data() + + if not self.is_numpy_array(self.hdr_typ): + self.hdr_typ = self.convert_to_numpy(self.hdr_typ) + if not self.is_numpy_array(self.hdr_sid): + self.hdr_sid = self.convert_to_numpy(self.hdr_sid) + if not self.is_numpy_array(self.hdr_vld): + self.hdr_vld = self.convert_to_numpy(self.hdr_vld) + if not self.is_numpy_array(self.hdr_lat): + self.hdr_lat = self.convert_to_numpy(self.hdr_lat) + if not self.is_numpy_array(self.hdr_lon): + self.hdr_lon = self.convert_to_numpy(self.hdr_lon) + if not self.is_numpy_array(self.hdr_elv): + self.hdr_elv = self.convert_to_numpy(self.hdr_elv) + + if not self.is_numpy_array(self.obs_qty): + self.obs_qty = self.convert_to_numpy(self.obs_qty) + if not self.is_numpy_array(self.obs_hid): + self.obs_hid = self.convert_to_numpy(self.obs_hid) + if not self.is_numpy_array(self.obs_vid): + self.obs_vid = self.convert_to_numpy(self.obs_vid) + if not self.is_numpy_array(self.obs_lvl): + self.obs_lvl = self.convert_to_numpy(self.obs_lvl) + if not self.is_numpy_array(self.obs_hgt): + self.obs_hgt = 
self.convert_to_numpy(self.obs_hgt) + if not self.is_numpy_array(self.obs_val): + self.obs_val = self.convert_to_numpy(self.obs_val) + + self.count_info = self.get_count_string() + self.met_point_data = self + return self.__dict__ + + def is_number(self, num_str): + return num_str.replace('-','1').replace('+','2').replace('.','3').isdigit() + + def is_numpy_array(self, var): + return isinstance(var, np.ndarray) + + def log_error_msg(self, err_msg): + base_met_point_obs.error_msg(err_msg) + + def log_error(self, err_msgs): + print(self.ERROR_P) + for err_line in err_msgs.split('\n'): + self.log_error_msg(err_line) + print(self.ERROR_P) + + def log_info(self, info_msg): + base_met_point_obs.info_msg(info_msg) + + def put_data(self, point_obs_dict): + self.use_var_id = point_obs_dict['use_var_id'] + self.hdr_typ = point_obs_dict['hdr_typ'] + self.hdr_sid = point_obs_dict['hdr_sid'] + self.hdr_vld = point_obs_dict['hdr_vld'] + self.hdr_lat = point_obs_dict['hdr_lat'] + self.hdr_lon = point_obs_dict['hdr_lon'] + self.hdr_elv = point_obs_dict['hdr_elv'] + self.hdr_typ_table = point_obs_dict['hdr_typ_table'] + self.hdr_sid_table = point_obs_dict['hdr_sid_table'] + self.hdr_vld_table = point_obs_dict['hdr_vld_table'] + + #Observation data + self.obs_qty = point_obs_dict['obs_qty'] + self.obs_hid = point_obs_dict['obs_hid'] + self.obs_lvl = point_obs_dict['obs_lvl'] + self.obs_hgt = point_obs_dict['obs_hgt'] + self.obs_val = point_obs_dict['obs_val'] + self.obs_vid = point_obs_dict['obs_vid'] + self.obs_var_table = point_obs_dict['obs_var_table'] + self.obs_qty_table = point_obs_dict['obs_qty_table'] + po_array = point_obs_dict.get('obs_unit', None) + if po_array is not None: + self.obs_var_unit = po_array + po_array = point_obs_dict.get('obs_desc', None) + if po_array is not None: + self.obs_var_desc = po_array + + po_array = point_obs_dict.get('hdr_prpt_typ', None) + if po_array is not None: + self.hdr_prpt_typ = po_array + po_array = point_obs_dict.get('hdr_irpt_typ', None) + if po_array is not None: + self.hdr_irpt_typ = po_array + po_array = point_obs_dict.get('hdr_inst_typ', None) + if po_array is not None: + self.hdr_inst_typ = po_array + + @staticmethod + def get_prompt(): + return " python:" + + @staticmethod + def error_msg(msg): + print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.ERROR_P} {msg}') + + @staticmethod + def info_msg(msg): + print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.INFO_P} {msg}') + + @staticmethod + def get_python_script(arg_value): + return arg_value[len(met_point_obs.python_prefix)+1:] + + @staticmethod + def is_python_script(arg_value): + return arg_value.startswith(met_point_obs.python_prefix) + + @staticmethod + def print_data(key, data_array, show_count=COUNT_SHOW): + if isinstance(data_array, list): + data_len = len(data_array) + if show_count >= data_len: + print(" {k:10s}: {v}".format(k=key, v= data_array)) + else: + end_offset = int(show_count/2) + print(" {k:10s}: count={v}".format(k=key, v=data_len)) + print(" {k:10s}[0:{o}] {v}".format(k=key, v=data_array[:end_offset], o=end_offset)) + print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v='...', s=end_offset+1, e=data_len-end_offset-1)) + print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v= data_array[-end_offset:], s=(data_len-end_offset), e=(data_len-1))) + else: + print(" {k:10s}: {v}".format(k=key, v= data_array)) + + @staticmethod + def print_point_data(met_point_data, print_subset=True): + print(' === MET point data by python embedding ===') + if print_subset: + 
met_point_obs.print_data('nhdr',met_point_data['nhdr']) + met_point_obs.print_data('nobs',met_point_data['nobs']) + met_point_obs.print_data('use_var_id',met_point_data['use_var_id']) + met_point_obs.print_data('hdr_typ',met_point_data['hdr_typ']) + met_point_obs.print_data('hdr_typ_table',met_point_data['hdr_typ_table']) + met_point_obs.print_data('hdr_sid',met_point_data['hdr_sid']) + met_point_obs.print_data('hdr_sid_table',met_point_data['hdr_sid_table']) + met_point_obs.print_data('hdr_vld',met_point_data['hdr_vld']) + met_point_obs.print_data('hdr_vld_table',met_point_data['hdr_vld_table']) + met_point_obs.print_data('hdr_lat',met_point_data['hdr_lat']) + met_point_obs.print_data('hdr_lon',met_point_data['hdr_lon']) + met_point_obs.print_data('hdr_elv',met_point_data['hdr_elv']) + met_point_obs.print_data('obs_hid',met_point_data['obs_hid']) + met_point_obs.print_data('obs_vid',met_point_data['obs_vid']) + met_point_obs.print_data('obs_var_table',met_point_data['obs_var_table']) + met_point_obs.print_data('obs_qty',met_point_data['obs_qty']) + met_point_obs.print_data('obs_qty_table',met_point_data['obs_qty_table']) + met_point_obs.print_data('obs_lvl',met_point_data['obs_lvl']) + met_point_obs.print_data('obs_hgt',met_point_data['obs_hgt']) + met_point_obs.print_data('obs_val',met_point_data['obs_val']) + else: + print('All',met_point_data) + print(" nhdr: ",met_point_data['nhdr']) + print(" nobs: ",met_point_data['nobs']) + print(' use_var_id: ',met_point_data['use_var_id']) + print(' hdr_typ: ',met_point_data['hdr_typ']) + print('hdr_typ_table: ',met_point_data['hdr_typ_table']) + print(' hdr_sid: ',met_point_data['hdr_sid']) + print('hdr_sid_table: ',met_point_data['hdr_sid_table']) + print(' hdr_vld: ',met_point_data['hdr_vld']) + print('hdr_vld_table: ',met_point_data['hdr_vld_table']) + print(' hdr_lat: ',met_point_data['hdr_lat']) + print(' hdr_lon: ',met_point_data['hdr_lon']) + print(' hdr_elv: ',met_point_data['hdr_elv']) + print(' obs_hid: ',met_point_data['obs_hid']) + print(' obs_vid: ',met_point_data['obs_vid']) + print('obs_var_table: ',met_point_data['obs_var_table']) + print(' obs_qty: ',met_point_data['obs_qty']) + print('obs_qty_table: ',met_point_data['obs_qty_table']) + print(' obs_lvl: ',met_point_data['obs_lvl']) + print(' obs_hgt: ',met_point_data['obs_hgt']) + print(' obs_val: ',met_point_data['obs_val']) + + print(' === MET point data by python embedding ===') + + +class csv_point_obs(base_met_point_obs): + + def __init__(self, point_data): + self.point_data = point_data + super(csv_point_obs, self).__init__() + + self.obs_cnt = obs_cnt = len(point_data) + self.obs_qty = [ 0 for _ in range(0, obs_cnt) ] # (nobs_qty) integer, index of self.obs_qty_table + self.obs_hid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer + self.obs_vid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer, variable index from self.obs_var_table or GRIB code + self.obs_lvl = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float + self.obs_hgt = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float + self.obs_val = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float + + self.convert_point_data() + + def check_csv_record(self, csv_point_data, index): + error_msgs = [] + # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] + # dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'} + if 11 > len(csv_point_data): + error_msgs.append("{i}-th data: missing columns. Should be 11 columns, not {c} columns".format( + i=index, c=len(csv_point_data))) + elif 11 < len(csv_point_data): + print("{i}-th data: ignoring columns after the 11th, out of {c} columns".format( + i=index, c=len(csv_point_data))) + if not isinstance(csv_point_data[0], str): + error_msgs.append("{i}-th data: message_type is not string".format(i=index)) + if not isinstance(csv_point_data[1], str): + error_msgs.append("{i}-th data: station_id is not string".format(i=index)) + if not isinstance(csv_point_data[2], str): + error_msgs.append("{i}-th data: valid_time is not string".format(i=index)) + if isinstance(csv_point_data[3], str): + error_msgs.append("{i}-th data: latitude cannot be a string".format(i=index)) + elif csv_point_data[3] < -90.0 or csv_point_data[3] > 90.0: + error_msgs.append("{i}-th data: latitude ({l}) is out of range".format(i=index, l=csv_point_data[3])) + if isinstance(csv_point_data[4], str): + error_msgs.append("{i}-th data: longitude cannot be a string".format(i=index)) + elif csv_point_data[4] < -180.0 or csv_point_data[4] > 360.0: + error_msgs.append("{i}-th data: longitude ({l}) is out of range".format(i=index, l=csv_point_data[4])) + if not isinstance(csv_point_data[6], str): + error_msgs.append("{i}-th data: grib_code/var_name is not string".format(i=index)) + if not isinstance(csv_point_data[9], str): + error_msgs.append("{i}-th data: quality_mark is not string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[5]) + if is_string and not is_num: + error_msgs.append("{i}-th data: elevation: only NA is accepted as string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[7]) + if is_string and not is_num: + error_msgs.append("{i}-th data: obs_level: only NA is accepted as string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[8]) + if is_string and not is_num: + error_msgs.append("{i}-th data: obs_height: only NA is accepted as string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[10]) + if is_string and not is_num: + error_msgs.append("{i}-th data: obs_value: only NA is accepted as string".format(i=index)) + return error_msgs + + def check_csv_point_data(self, all_records=False): + if 0 == len(self.point_data): + self.add_error_msg("No data!") + elif all_records: + data_idx = 0 + for csv_point_data in self.point_data: + data_idx += 1 + error_messages = self.check_csv_record(csv_point_data, data_idx) + if len(error_messages) > 0: + self.add_error_msgs(error_messages) + else: + error_messages = self.check_csv_record(self.point_data[0], index=1) + if len(error_messages) > 0: + self.add_error_msgs(error_messages) + if 1 < len(self.point_data): + error_messages = self.check_csv_record(self.point_data[-1], index=len(self.point_data)) + if len(error_messages) > 0: + self.add_error_msgs(error_messages) + + def convert_point_data(self): + hdr_cnt = hdr_typ_cnt = hdr_sid_cnt = hdr_vld_cnt = 0 + var_name_cnt = qc_cnt = 0 + + hdr_map = {} + hdr_typ_map = {} + hdr_sid_map = {} + hdr_vld_map = {} + obs_var_map = {} + obs_qty_map = {} + self.use_var_id = not self.is_grib_code() + + index = 0 + #names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] + for csv_point_record in self.point_data: + # Build header map.
+ hdr_typ_str = csv_point_record[0] + hdr_typ_idx = hdr_typ_map.get(hdr_typ_str,-1) + if hdr_typ_idx < 0: + hdr_typ_idx = hdr_typ_cnt + hdr_typ_map[hdr_typ_str] = hdr_typ_idx + hdr_typ_cnt += 1 + + hdr_sid_str = csv_point_record[1] + hdr_sid_idx = hdr_sid_map.get(hdr_sid_str,-1) + if hdr_sid_idx < 0: + hdr_sid_idx = hdr_sid_cnt + hdr_sid_map[hdr_sid_str] = hdr_sid_idx + hdr_sid_cnt += 1 + + hdr_vld_str = csv_point_record[2] + hdr_vld_idx = hdr_vld_map.get(hdr_vld_str,-1) + if hdr_vld_idx < 0: + hdr_vld_idx = hdr_vld_cnt + hdr_vld_map[hdr_vld_str] = hdr_vld_idx + hdr_vld_cnt += 1 + + lat = csv_point_record[3] + lon = csv_point_record[4] + elv = self.get_num_value(csv_point_record[5] ) + hdr_key = (hdr_typ_idx,hdr_sid_idx,hdr_vld_idx,lat,lon,elv) + hdr_idx = hdr_map.get(hdr_key,-1) + if hdr_idx < 0: + hdr_idx = hdr_cnt + hdr_map[hdr_key] = hdr_idx + hdr_cnt += 1 + + var_id_str = csv_point_record[6] + if self.use_var_id: + var_id = obs_var_map.get(var_id_str,-1) + if var_id < 0: + var_id = var_name_cnt + obs_var_map[var_id_str] = var_id + var_name_cnt += 1 + else: + var_id = int(var_id_str) + + qc_str = csv_point_record[9] + qc_id = obs_qty_map.get(qc_str,-1) + if qc_id < 0: + qc_id = qc_cnt + obs_qty_map[qc_str] = qc_id + qc_cnt += 1 + + # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] + self.obs_vid[index] = var_id + self.obs_hid[index] = hdr_idx + self.obs_lvl[index] = self.get_num_value(csv_point_record[7]) + self.obs_hgt[index] = self.get_num_value(csv_point_record[8]) + self.obs_val[index] = self.get_num_value(csv_point_record[10]) + self.obs_qty[index] = qc_id + + index += 1 + + self.nhdr = hdr_cnt + self.nhdr_typ = hdr_typ_cnt + self.nhdr_sid = hdr_sid_cnt + self.nhdr_vld = hdr_vld_cnt + self.nobs_var = var_name_cnt + self.nobs_qty = qc_cnt + + # Fill header array and table array based on the map + self.hdr_typ = [ 0 for _ in range(0, hdr_cnt) ] + self.hdr_sid = [ 0 for _ in range(0, hdr_cnt) ] + self.hdr_vld = [ 0 for _ in range(0, hdr_cnt) ] + self.hdr_lat = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] + self.hdr_lon = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] + self.hdr_elv = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] + for key, idx in hdr_map.items(): + self.hdr_typ[idx] = key[0] + self.hdr_sid[idx] = key[1] + self.hdr_vld[idx] = key[2] + self.hdr_lat[idx] = key[3] + self.hdr_lon[idx] = key[4] + self.hdr_elv[idx] = key[5] + + self.hdr_typ_table = [ "" for _ in range(0, hdr_typ_cnt) ] + self.hdr_sid_table = [ "" for _ in range(0, hdr_sid_cnt) ] + self.hdr_vld_table = [ "" for _ in range(0, hdr_vld_cnt) ] + self.obs_qty_table = [ "" for _ in range(0, qc_cnt) ] + self.obs_var_table = [ "" for _ in range(0, var_name_cnt) ] + for key, idx in hdr_typ_map.items(): + self.hdr_typ_table[idx] = key + for key, idx in hdr_sid_map.items(): + self.hdr_sid_table[idx] = key + for key, idx in hdr_vld_map.items(): + self.hdr_vld_table[idx] = key + for key, idx in obs_qty_map.items(): + self.obs_qty_table[idx] = key + for key, idx in obs_var_map.items(): + self.obs_var_table[idx] = key + + def get_num_value(self, column_value): + num_value = column_value + if isinstance(column_value, str): + if self.is_number(column_value): + num_value = float(column_value) + else: + num_value = self.FILL_VALUE + if column_value.lower() != 'na' and column_value.lower() != 'n/a': + self.log_info(f'{column_value} is not a number, converted to the missing value') + return num_value + + def is_grib_code(self): + grib_code = True + for _point_data in self.point_data: + if 
isinstance(_point_data[6], int):
+                continue
+            elif isinstance(_point_data[6], str) and not _point_data[6].isdecimal():
+                grib_code = False
+                break
+        return grib_code
+
+    def is_num_string(self, column_value):
+        is_string = isinstance(column_value, str)
+        if is_string:
+            is_num = self.is_number(column_value) or column_value.lower() in ('na', 'n/a')
+        else:
+            is_num = True
+        return is_string, is_num
+
+
+class met_point_obs(ABC, base_met_point_obs):
+
+    MET_ENV_RUN = 'MET_FORCE_TO_RUN'
+
+    @abstractmethod
+    def read_data(self, args):
+        # args can be input_file_name, list, or dictionary
+        # - The variables at __init__ should be filled as python list or numpy array
+        # - set self.input_name
+        #
+        # Here is a template
+        '''
+        if isinstance(args, dict):
+            in_filename = args.get('in_name',None)
+        elif isinstance(args, list):
+            in_filename = args[0]
+        else:
+            in_filename = args
+        self.input_name = in_filename
+        '''
+        pass
+
+
+class met_point_tools():
+
+    @staticmethod
+    def convert_point_data(point_data, check_all_records=False, input_type='csv'):
+        tmp_point_data = {}
+        if 'csv' == input_type:
+            csv_point_data = csv_point_obs(point_data)
+            csv_point_data.check_csv_point_data(check_all_records)
+            tmp_point_data = csv_point_data.get_point_data()
+        else:
+            base_met_point_obs.error_msg(f'Unsupported input type: {input_type}')
+        return tmp_point_data
+
+    @staticmethod
+    def get_prompt():
+        return " python:"
+
+    @staticmethod
+    def get_nc_point_obs():
+        return nc_point_obs()
+
+    @staticmethod
+    def get_sample_point_obs():
+        return sample_met_point_obs()
+
+    @staticmethod
+    def is_python_prefix(user_cmd):
+        return user_cmd.startswith(base_met_point_obs.python_prefix)
+
+    # Read the input file, an 11-column text file, given as the first argument
+    @staticmethod
+    def read_text_point_obs(input_file, header=None,
+                            delim_whitespace=True, keep_default_na=False):
+        # Read and format the input 11-column observations:
+        #   (1)  string:  Message_Type
+        #   (2)  string:  Station_ID
+        #   (3)  string:  Valid_Time(YYYYMMDD_HHMMSS)
+        #   (4)  numeric: Lat(Deg North)
+        #   (5)  numeric: Lon(Deg East)
+        #   (6)  numeric: Elevation(msl)
+        #   (7)  string:  Var_Name(or GRIB_Code)
+        #   (8)  numeric: Level
+        #   (9)  numeric: Height(msl or agl)
+        #   (10) string:  QC_String
+        #   (11) numeric: Observation_Value
+        ascii_point_data = pd.read_csv(input_file, header=header,
+                                       delim_whitespace=delim_whitespace,
+                                       keep_default_na=keep_default_na,
+                                       names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'],
+                                       dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist()
+        return ascii_point_data
+
+# Note: the caller should import netCDF4.
+# The arguments nc_group (a Dataset) and nc_var must not be None.
+class nc_tools():
+
+    met_missing = -99999999.
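+
+    # A minimal usage sketch (the file name and handle below are
+    # illustrative, not part of MET's API):
+    #   ds   = netCDF4.Dataset('point_obs.nc', 'r')
+    #   lats = nc_tools.get_num_array(ds, 'hdr_lat')
+    #   typs = nc_tools.get_string_array(ds, 'hdr_typ_table')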
+ + @staticmethod + def get_num_array(nc_group, var_name): + nc_var = nc_group.variables.get(var_name, None) + return [] if nc_var is None else nc_var[:] + + @staticmethod + def get_ncbyte_array_to_str(nc_var): + nc_str_data = nc_var[:] + if nc_var.datatype.name == 'bytes8': + nc_str_data = [ str(s.compressed(),"utf-8") for s in nc_var[:] ] + return nc_str_data + + @staticmethod + def get_string_array(nc_group, var_name): + nc_var = nc_group.variables.get(var_name, None) + return [] if nc_var is None else nc_tools.get_ncbyte_array_to_str(nc_var) + + +class nc_point_obs(met_point_obs): + + # args should be string, list, or dictionary + def get_nc_filename(self, args): + nc_filename = None + if isinstance(args, dict): + nc_filename = args.get('nc_name',None) + elif isinstance(args, list): + nc_filename = args[0] + elif args != ARG_PRINT_DATA: + nc_filename = args + + return nc_filename + + def read_data(self, nc_filename): + if nc_filename is None: + self.log_error_msg("The input NetCDF filename is missing") + elif not os.path.exists(nc_filename): + self.log_error_msg(f"input NetCDF file ({nc_filename}) does not exist") + else: + dataset = nc.Dataset(nc_filename, 'r') + + attr_name = 'use_var_id' + use_var_id_str = dataset.getncattr(attr_name) if attr_name in dataset.ncattrs() else "false" + self.use_var_id = use_var_id_str.lower() == 'true' + + # Header + self.hdr_typ = dataset['hdr_typ'][:] + self.hdr_sid = dataset['hdr_sid'][:] + self.hdr_vld = dataset['hdr_vld'][:] + self.hdr_lat = dataset['hdr_lat'][:] + self.hdr_lon = dataset['hdr_lon'][:] + self.hdr_elv = dataset['hdr_elv'][:] + self.hdr_typ_table = nc_tools.get_string_array(dataset, 'hdr_typ_table') + self.hdr_sid_table = nc_tools.get_string_array(dataset, 'hdr_sid_table') + self.hdr_vld_table = nc_tools.get_string_array(dataset, 'hdr_vld_table') + + nc_var = dataset.variables.get('obs_unit', None) + if nc_var: + self.obs_var_unit = nc_var[:] + nc_var = dataset.variables.get('obs_desc', None) + if nc_var: + self.obs_var_desc = nc_var[:] + + nc_var = dataset.variables.get('hdr_prpt_typ', None) + if nc_var: + self.hdr_prpt_typ = nc_var[:] + nc_var = dataset.variables.get('hdr_irpt_typ', None) + if nc_var: + self.hdr_irpt_typ = nc_var[:] + nc_var = dataset.variables.get('hdr_inst_typ', None) + if nc_var: + self.hdr_inst_typ =nc_var[:] + + #Observation data + self.hdr_sid = dataset['hdr_sid'][:] + self.obs_qty = np.array(dataset['obs_qty'][:]) + self.obs_hid = np.array(dataset['obs_hid'][:]) + self.obs_lvl = np.array(dataset['obs_lvl'][:]) + self.obs_hgt = np.array(dataset['obs_hgt'][:]) + self.obs_val = np.array(dataset['obs_val'][:]) + nc_var = dataset.variables.get('obs_vid', None) + if nc_var is None: + self.use_var_id = False + nc_var = dataset.variables.get('obs_gc', None) + else: + self.obs_var_table = nc_tools.get_string_array(dataset, 'obs_var') + if nc_var: + self.obs_vid = np.array(nc_var[:]) + + self.obs_qty_table = nc_tools.get_string_array(dataset, 'obs_qty_table') + + def save_ncfile(self, nc_filename): + met_data = self.get_point_data() + with nc.Dataset(nc_filename, 'w') as nc_dataset: + self.set_nc_data(nc_dataset) + return met_data + + def set_nc_data(self, nc_dataset): + return nc_point_obs.write_nc_data(nc_dataset, self) + + @staticmethod + def write_nc_file(nc_filename, point_obs): + with nc.Dataset(nc_filename, 'w') as nc_dataset: + nc_point_obs.set_nc_data(nc_dataset, point_obs) + + @staticmethod + def write_nc_data(nc_dataset, point_obs): + do_nothing = False + if 0 == point_obs.nhdr: + do_nothing = True + 
base_met_point_obs.info_msg("the header is empty")
+        if 0 == point_obs.nobs:
+            do_nothing = True
+            base_met_point_obs.info_msg("the observation data is empty")
+        if do_nothing:
+            print()
+            return
+
+        # Set global attributes
+        nc_dataset.MET_Obs_version = "1.02"
+        nc_dataset.use_var_id = "true" if point_obs.use_var_id else "false"
+
+        # Create dimensions
+        nc_dataset.createDimension('mxstr', 16)
+        nc_dataset.createDimension('mxstr2', 40)
+        nc_dataset.createDimension('mxstr3', 80)
+        nc_dataset.createDimension('nhdr', point_obs.nhdr)
+        nc_dataset.createDimension('nobs', point_obs.nobs)
+        #npbhdr = len(point_obs.hdr_prpt_typ)
+        if 0 < point_obs.npbhdr:
+            nc_dataset.createDimension('npbhdr', point_obs.npbhdr)
+        nc_dataset.createDimension('nhdr_typ', point_obs.nhdr_typ)
+        nc_dataset.createDimension('nhdr_sid', point_obs.nhdr_sid)
+        nc_dataset.createDimension('nhdr_vld', point_obs.nhdr_vld)
+        nc_dataset.createDimension('nobs_qty', point_obs.nobs_qty)
+        nc_dataset.createDimension('obs_var_num', point_obs.nobs_var)
+
+        type_for_string = 'S1'   # np.byte
+        dims_hdr = ('nhdr',)
+        dims_obs = ('nobs',)
+
+        # Create header and observation variables
+        var_hdr_typ = nc_dataset.createVariable('hdr_typ', np.int32, dims_hdr, fill_value=-9999)
+        var_hdr_sid = nc_dataset.createVariable('hdr_sid', np.int32, dims_hdr, fill_value=-9999)
+        var_hdr_vld = nc_dataset.createVariable('hdr_vld', np.int32, dims_hdr, fill_value=-9999)
+        var_hdr_lat = nc_dataset.createVariable('hdr_lat', np.float32, dims_hdr, fill_value=-9999.)
+        var_hdr_lon = nc_dataset.createVariable('hdr_lon', np.float32, dims_hdr, fill_value=-9999.)
+        var_hdr_elv = nc_dataset.createVariable('hdr_elv', np.float32, dims_hdr, fill_value=-9999.)
+
+        var_obs_qty = nc_dataset.createVariable('obs_qty', np.int32, dims_obs, fill_value=-9999)
+        var_obs_hid = nc_dataset.createVariable('obs_hid', np.int32, dims_obs, fill_value=-9999)
+        var_obs_vid = nc_dataset.createVariable('obs_vid', np.int32, dims_obs, fill_value=-9999)
+        var_obs_lvl = nc_dataset.createVariable('obs_lvl', np.float32, dims_obs, fill_value=-9999.)
+        var_obs_hgt = nc_dataset.createVariable('obs_hgt', np.float32, dims_obs, fill_value=-9999.)
+        var_obs_val = nc_dataset.createVariable('obs_val', np.float32, dims_obs, fill_value=-9999.)
+
+        if 0 == point_obs.npbhdr:
+            var_hdr_prpt_typ = None
+            var_hdr_irpt_typ = None
+            var_hdr_inst_typ = None
+        else:
+            dims_npbhdr = ('npbhdr',)
+            var_hdr_prpt_typ = nc_dataset.createVariable('hdr_prpt_typ', np.int32, dims_npbhdr, fill_value=-9999)
+            var_hdr_irpt_typ = nc_dataset.createVariable('hdr_irpt_typ', np.int32, dims_npbhdr, fill_value=-9999)
+            var_hdr_inst_typ = nc_dataset.createVariable('hdr_inst_typ', np.int32, dims_npbhdr, fill_value=-9999)
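+
+        # The string tables below are stored as 2-D char arrays ('S1'), one
+        # character per element, padded to the mxstr/mxstr2/mxstr3 lengths;
+        # e.g. "ADPSFC" occupies one (nhdr_typ, mxstr2) row as
+        # 'A','D','P','S','F','C' followed by empty padding.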
+ + var_hdr_typ_table = nc_dataset.createVariable('hdr_typ_table', type_for_string, ('nhdr_typ','mxstr2')) + var_hdr_sid_table = nc_dataset.createVariable('hdr_sid_table', type_for_string, ('nhdr_sid','mxstr2')) + var_hdr_vld_table = nc_dataset.createVariable('hdr_vld_table', type_for_string, ('nhdr_vld','mxstr')) + var_obs_qty_table = nc_dataset.createVariable('obs_qty_table', type_for_string, ('nobs_qty','mxstr')) + var_obs_var_table = nc_dataset.createVariable('obs_var', type_for_string, ('obs_var_num','mxstr2')) + var_obs_var_unit = nc_dataset.createVariable('obs_unit', type_for_string, ('obs_var_num','mxstr2')) + var_obs_var_desc = nc_dataset.createVariable('obs_desc', type_for_string, ('obs_var_num','mxstr3')) + + # Set variables + var_hdr_typ[:] = point_obs.hdr_typ[:] + var_hdr_sid[:] = point_obs.hdr_sid[:] + var_hdr_vld[:] = point_obs.hdr_vld[:] + var_hdr_lat[:] = point_obs.hdr_lat[:] + var_hdr_lon[:] = point_obs.hdr_lon[:] + var_hdr_elv[:] = point_obs.hdr_elv[:] + for i in range(0, point_obs.nhdr_typ): + for j in range(0, len(point_obs.hdr_typ_table[i])): + var_hdr_typ_table[i,j] = point_obs.hdr_typ_table[i][j] + for i in range(0, point_obs.nhdr_sid): + for j in range(0, len(point_obs.hdr_sid_table[i])): + var_hdr_sid_table[i,j] = point_obs.hdr_sid_table[i][j] + for i in range(0, point_obs.nhdr_vld): + for j in range(0, len(point_obs.hdr_vld_table[i])): + var_hdr_vld_table[i,j] = point_obs.hdr_vld_table[i][j] + if 0 < point_obs.npbhdr: + var_hdr_prpt_typ[:] = point_obs.hdr_prpt_typ[:] + var_hdr_irpt_typ[:] = point_obs.hdr_irpt_typ[:] + var_hdr_inst_typ[:] = point_obs.hdr_inst_typ[:] + + var_obs_qty[:] = point_obs.obs_qty[:] + var_obs_hid[:] = point_obs.obs_hid[:] + var_obs_vid[:] = point_obs.obs_vid[:] + var_obs_lvl[:] = point_obs.obs_lvl[:] + var_obs_hgt[:] = point_obs.obs_hgt[:] + var_obs_val[:] = point_obs.obs_val[:] + for i in range(0, point_obs.nobs_var): + for j in range(0, len(point_obs.obs_var_table[i])): + var_obs_var_table[i,j] = point_obs.obs_var_table[i][j] + var_obs_var_unit[i] = "" if i >= len(point_obs.obs_var_unit) else point_obs.obs_var_unit[i] + var_obs_var_desc[i] = "" if i >= len(point_obs.obs_var_desc) else point_obs.obs_var_desc[i] + for i in range(0, point_obs.nobs_qty): + for j in range(0, len(point_obs.obs_qty_table[i])): + var_obs_qty_table[i,j] = point_obs.obs_qty_table[i][j] + + # Set variable attributes + var_hdr_typ.long_name = "index of message type" + var_hdr_sid.long_name = "index of station identification" + var_hdr_vld.long_name = "index of valid time" + var_hdr_lat.long_name = "latitude" + var_hdr_lat.units = "degrees_north" + var_hdr_lon.long_name = "longitude" + var_hdr_lon.units = "degrees_east" + var_hdr_elv.long_name = "elevation" + var_hdr_elv.units = "meters above sea level (msl)" + + var_obs_qty.long_name = "index of quality flag" + var_obs_hid.long_name = "index of matching header data" + var_obs_vid.long_name = "index of BUFR variable corresponding to the observation type" + var_obs_lvl.long_name = "pressure level (hPa) or accumulation interval (sec)" + var_obs_hgt.long_name = "height in meters above sea level (msl)" + var_obs_val.long_name = "observation value" + var_hdr_typ_table.long_name = "message type" + var_hdr_sid_table.long_name = "station identification" + var_hdr_vld_table.long_name = "valid time" + var_hdr_vld_table.units = "YYYYMMDD_HHMMSS UTC" + var_obs_qty_table.long_name = "quality flag" + var_obs_var_table.long_name = "variable names" + var_obs_var_unit.long_name = "variable units" + var_obs_var_desc.long_name = 
"variable descriptions" + + +# This is a sample drived class +class sample_met_point_obs(met_point_obs): + + #@abstractmethod + def read_data(self, arg_map={}): + self.hdr_typ = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) + self.hdr_sid = np.array([ 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3, 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3 ]) + self.hdr_vld = np.array([ 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3, 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3 ]) + self.hdr_lat = np.array([ 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46., 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46. ]) + self.hdr_lon = np.array([ -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92., -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92. ]) + self.hdr_elv = np.array([ 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220. ]) + + self.obs_hid = np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 25 ]) + self.obs_vid = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]) + self.obs_qty = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) + self.obs_lvl = np.array([ 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000. ]) + self.obs_hgt = np.array([ 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2. ]) + self.obs_val = np.array([ 292., 292.5, 293., 293.5, 294., 294.5, 295., 295.5, 296., 292., 293.4, 293., 296., 294., 92., 92.5, 93., 93.5, 94., 94.5, 95., 95.5, 96., 92., 93.4, 93., 96., 94. 
])
+
+        self.hdr_typ_table = [ "ADPSFC" ]
+        self.hdr_sid_table = [ "001", "002", "003", "004" ]
+        self.hdr_vld_table = [
+            "20120409_115000", "20120409_115500", "20120409_120100", "20120409_120500", "20120409_121000",
+            "20120409_120000" ]
+        self.obs_var_table = [ "TMP", "RH" ]
+        self.obs_qty_table = [ "NA" ]
+
+def convert_point_data(point_data, check_all_records=False, input_type='csv'):
+    tmp_point_data = {}
+    if 'csv' == input_type:
+        csv_point_data = csv_point_obs(point_data)
+        csv_point_data.check_csv_point_data(check_all_records)
+        tmp_point_data = csv_point_data.get_point_data()
+    else:
+        base_met_point_obs.error_msg(f'Unsupported input type: {input_type}')
+    return tmp_point_data
+
+def main():
+    args = {}   # or args = []
+    point_obs_data = sample_met_point_obs()
+    point_obs_data.read_data(args)
+    met_point_data = point_obs_data.get_point_data()
+
+    point_obs_data.print_point_data(met_point_data, print_subset=False)
+
+def main_nc(argv):
+    if len(argv) != 1 and argv[1] != ARG_PRINT_DATA:
+        netcdf_filename = argv[1]
+        tmp_nc_name = 'tmp_met_point.nc'
+        point_obs_data = nc_point_obs()
+        point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename))
+        met_point_data = point_obs_data.save_ncfile(tmp_nc_name)
+        print(f'{base_met_point_obs.get_prompt()} saved met_point_data to {tmp_nc_name}')
+        met_point_data['met_point_data'] = point_obs_data
+
+        if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]:
+            met_point_obs.print_point_data(met_point_data)
+
+
+if __name__ == '__main__':
+    main()
+    print('Done python script')
diff --git a/scripts/python/met_point_obs.py b/scripts/python/met_point_obs.py
deleted file mode 100755
index fb108705fd..0000000000
--- a/scripts/python/met_point_obs.py
+++ /dev/null
@@ -1,660 +0,0 @@
-#!/usr/bin/env python3
-'''
-Created on Nov 10, 2021
-
-@author: hsoh
-
-- This is the base class and the customized script should extend the met_point_obs.
-- The customized script (for example "custom_reader") must implement
-  "def read_data(self, args)" which fills the array variables at __init__().
-- The args can be 1) single string argument, 2) the list of arguments,
-  or 3) the dictionary of arguments.
-- A python objects, met_point_data, must set:
-  + "point_obs_data" is an optional to use custom python EXE.
-    It's a python instance which processes the point observation data
-- The customized script is expected to include following codes:
-
-    # prepare arguments for the customized script
-    args = {'input', sys.argv[1]}   # or args = []
-    point_obs_data = custom_reader()
-    point_obs_data.read_data(args)
-    met_point_data = point_obs_data.get_point_data()
-
-'''
-
-import os
-from abc import ABC, abstractmethod
-import numpy as np
-
-COUNT_SHOW = 30
-
-def get_prompt():
-    return " python:"
-
-def met_is_python_prefix(user_cmd):
-    return user_cmd.startswith(base_met_point_obs.python_prefix)
-
-
-class base_met_point_obs(object):
-    '''
-    classdocs
-    '''
-    ERROR_P = " ==PYTHON_ERROR=="
-    INFO_P = " ==PYTHON_INFO=="
-
-    python_prefix = 'PYTHON_POINT_USER'
-
-    FILL_VALUE = -9999.
- - def __init__(self, use_var_id=True): - ''' - Constructor - ''' - self.count_info = "" - self.input_name = None - self.ignore_input_file = False - self.use_var_id = use_var_id # True if variable index, False if GRIB code - self.error_msg = "" - self.has_error = False - - # Header - self.nhdr = 0 - self.npbhdr = 0 - self.nhdr_typ = 0 # type table - self.nhdr_sid = 0 # station_id table - self.nhdr_vld = 0 # valid time strings - self.hdr_typ = [] # (nhdr) integer - self.hdr_sid = [] # (nhdr) integer - self.hdr_vld = [] # (nhdr) integer - self.hdr_lat = [] # (nhdr) float - self.hdr_lon = [] # (nhdr) float - self.hdr_elv = [] # (nhdr) float - self.hdr_typ_table = [] # (nhdr_typ, mxstr2) string - self.hdr_sid_table = [] # (nhdr_sid, mxstr2) string - self.hdr_vld_table = [] # (nhdr_vld, mxstr) string - - #Observation data - self.nobs = 0 - self.nobs_qty = 0 - self.nobs_var = 0 - self.obs_qty = [] # (nobs_qty) integer, index of self.obs_qty_table - self.obs_hid = [] # (nobs) integer - self.obs_vid = [] # (nobs) integer, veriable index from self.obs_var_table or GRIB code - self.obs_lvl = [] # (nobs) float - self.obs_hgt = [] # (nobs) float - self.obs_val = [] # (nobs) float - self.obs_qty_table = [] # (nobs_qty, mxstr) string - self.obs_var_table = [] # (nobs_var, mxstr2) string, required if self.use_var_id is True - self.obs_var_unit = [] # (nobs_var, mxstr2) string, optional if self.use_var_id is True - self.obs_var_desc = [] # (nobs_var, mxstr3) string, optional if self.use_var_id is True - - # Optional variables for PREPBUFR, not supported yet - self.hdr_prpt_typ = [] # optional - self.hdr_irpt_typ = [] # optional - self.hdr_inst_typ = [] # optional - - def add_error_msg(self, error_msg): - self.has_error = True - self.log_error_msg(error_msg) - if 0 == len(self.error_msg): - self.error_msg = error_msg - else: - self.error_msg = "{m1}\n{m2}".format(m1=self.error_msg, m2=error_msg) - - def add_error_msgs(self, error_msgs): - self.has_error = True - for error_msg in error_msgs: - self.add_error_msg(error_msg) - - def check_data_member_float(self, local_var, var_name): - if 0 == len(local_var): - self.add_error_msg("{v} is empty (float)".format(v=var_name)) - elif isinstance(local_var, list): - if isinstance(local_var[0], str) and not self.is_number(local_var[0]): - self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int or float only)".format( - n=var_name, v=local_var[0])) - elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], (int, float)): - self.add_error_msg("Not supported data type ({t}) for {v}[0] (int or float only)".format( - v=var_name, t=type(local_var[0]))) - elif not self.is_numpy_array(local_var): - self.add_error_msg("Not supported data type ({t}) for {v} (list and numpy.ndarray)".format( - v=var_name, t=type(local_var))) - - def check_data_member_int(self, local_var, var_name): - if 0 == len(local_var): - self.add_error_msg("{v} is empty (int)".format(v=var_name)) - elif isinstance(local_var, list): - if isinstance(local_var[0], str) and not self.is_number(local_var[0]): - self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int only)".format( - n=var_name, v=local_var[0])) - elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], int): - self.add_error_msg("Not supported data type ({t}) for {v}[0] (int only)".format( - v=var_name, t=type(local_var[0]))) - elif not self.is_numpy_array(local_var): - self.add_error_msg("Not supported data type ({t}) for {v} (list and 
numpy.ndarray)".format( - v=var_name, t=type(local_var))) - - def check_data_member_string(self, local_var, var_name): - if 0 == len(local_var): - self.add_error_msg("{v} is empty (string)".format(v=var_name)) - elif not isinstance(local_var, (list)): - self.add_error_msg("Not supported data type ({t}) for {v} (list)".format( - v=var_name, t=type(local_var))) - - def check_point_data(self): - if not self.ignore_input_file and self.input_name is not None and not os.path.exists(self.input_name): - self.add_error_msg('The netcdf input {f} does not exist'.format(f=self.input_name)) - else: - self.check_data_member_int(self.hdr_typ,'hdr_typ') - self.check_data_member_int(self.hdr_sid,'hdr_sid') - self.check_data_member_int(self.hdr_vld,'hdr_vld') - self.check_data_member_float(self.hdr_lat,'hdr_lat') - self.check_data_member_float(self.hdr_lon,'hdr_lon') - self.check_data_member_float(self.hdr_elv,'hdr_elv') - self.check_data_member_string(self.hdr_typ_table,'hdr_typ_table') - self.check_data_member_string(self.hdr_sid_table,'hdr_sid_table') - self.check_data_member_string(self.hdr_vld_table,'hdr_vld_table') - - self.check_data_member_int(self.obs_qty,'obs_qty') - self.check_data_member_int(self.obs_hid,'obs_hid') - self.check_data_member_int(self.obs_vid,'obs_vid') - self.check_data_member_float(self.obs_lvl,'obs_lvl') - self.check_data_member_float(self.obs_hgt,'obs_hgt') - self.check_data_member_float(self.obs_val,'obs_val') - self.check_data_member_string(self.obs_qty_table,'obs_qty_table') - if self.use_var_id: - self.check_data_member_string(self.obs_var_table,'obs_var_table') - - def convert_to_numpy(self, value_list): - return np.array(value_list) - - def dump(self): - base_met_point_obs.print_point_data(self.get_point_data()) - - def get_count_string(self): - return f' nobs={self.nobs} nhdr={self.nhdr} ntyp={self.nhdr_typ} nsid={self.nhdr_sid} nvld={self.nhdr_vld} nqty={self.nobs_qty} nvar={self.nobs_var}' - - def get_point_data(self): - if self.nhdr <= 0: - self.nhdr = len(self.hdr_lat) - if self.nobs <= 0: - self.nobs = len(self.obs_val) - if self.nhdr_typ <= 0: - self.nhdr_typ = len(self.hdr_typ_table) - if self.nhdr_sid <= 0: - self.nhdr_sid = len(self.hdr_sid_table) - if self.nhdr_vld <= 0: - self.nhdr_vld = len(self.hdr_vld_table) - if self.npbhdr <= 0: - self.npbhdr = len(self.hdr_prpt_typ) - if self.nobs_qty <= 0: - self.nobs_qty = len(self.obs_qty_table) - if self.nobs_var <= 0: - self.nobs_var = len(self.obs_var_table) - self.check_point_data() - - if not self.is_numpy_array(self.hdr_typ): - self.hdr_typ = self.convert_to_numpy(self.hdr_typ) - if not self.is_numpy_array(self.hdr_sid): - self.hdr_sid = self.convert_to_numpy(self.hdr_sid) - if not self.is_numpy_array(self.hdr_vld): - self.hdr_vld = self.convert_to_numpy(self.hdr_vld) - if not self.is_numpy_array(self.hdr_lat): - self.hdr_lat = self.convert_to_numpy(self.hdr_lat) - if not self.is_numpy_array(self.hdr_lon): - self.hdr_lon = self.convert_to_numpy(self.hdr_lon) - if not self.is_numpy_array(self.hdr_elv): - self.hdr_elv = self.convert_to_numpy(self.hdr_elv) - - if not self.is_numpy_array(self.obs_qty): - self.obs_qty = self.convert_to_numpy(self.obs_qty) - if not self.is_numpy_array(self.obs_hid): - self.obs_hid = self.convert_to_numpy(self.obs_hid) - if not self.is_numpy_array(self.obs_vid): - self.obs_vid = self.convert_to_numpy(self.obs_vid) - if not self.is_numpy_array(self.obs_lvl): - self.obs_lvl = self.convert_to_numpy(self.obs_lvl) - if not self.is_numpy_array(self.obs_hgt): - self.obs_hgt = 
self.convert_to_numpy(self.obs_hgt) - if not self.is_numpy_array(self.obs_val): - self.obs_val = self.convert_to_numpy(self.obs_val) - - self.count_info = self.get_count_string() - self.met_point_data = self - return self.__dict__ - - def is_number(self, num_str): - return num_str.replace('-','1').replace('+','2').replace('.','3').isdigit() - - def is_numpy_array(self, var): - return isinstance(var, np.ndarray) - - def log_error_msg(self, err_msg): - base_met_point_obs.error_msg(err_msg) - - def log_error(self, err_msgs): - print(self.ERROR_P) - for err_line in err_msgs.split('\n'): - self.log_error_msg(err_line) - print(self.ERROR_P) - - def log_info(self, info_msg): - base_met_point_obs.info_msg(info_msg) - - def put_data(self, point_obs_dict): - self.use_var_id = point_obs_dict['use_var_id'] - self.hdr_typ = point_obs_dict['hdr_typ'] - self.hdr_sid = point_obs_dict['hdr_sid'] - self.hdr_vld = point_obs_dict['hdr_vld'] - self.hdr_lat = point_obs_dict['hdr_lat'] - self.hdr_lon = point_obs_dict['hdr_lon'] - self.hdr_elv = point_obs_dict['hdr_elv'] - self.hdr_typ_table = point_obs_dict['hdr_typ_table'] - self.hdr_sid_table = point_obs_dict['hdr_sid_table'] - self.hdr_vld_table = point_obs_dict['hdr_vld_table'] - - #Observation data - self.obs_qty = point_obs_dict['obs_qty'] - self.obs_hid = point_obs_dict['obs_hid'] - self.obs_lvl = point_obs_dict['obs_lvl'] - self.obs_hgt = point_obs_dict['obs_hgt'] - self.obs_val = point_obs_dict['obs_val'] - self.obs_vid = point_obs_dict['obs_vid'] - self.obs_var_table = point_obs_dict['obs_var_table'] - self.obs_qty_table = point_obs_dict['obs_qty_table'] - po_array = point_obs_dict.get('obs_unit', None) - if po_array is not None: - self.obs_var_unit = po_array - po_array = point_obs_dict.get('obs_desc', None) - if po_array is not None: - self.obs_var_desc = po_array - - po_array = point_obs_dict.get('hdr_prpt_typ', None) - if po_array is not None: - self.hdr_prpt_typ = po_array - po_array = point_obs_dict.get('hdr_irpt_typ', None) - if po_array is not None: - self.hdr_irpt_typ = po_array - po_array = point_obs_dict.get('hdr_inst_typ', None) - if po_array is not None: - self.hdr_inst_typ = po_array - - @staticmethod - def error_msg(msg): - print(f'{get_prompt()} {base_met_point_obs.ERROR_P} {msg}') - - @staticmethod - def info_msg(msg): - print(f'{get_prompt()} {base_met_point_obs.INFO_P} {msg}') - - @staticmethod - def get_python_script(arg_value): - return arg_value[len(met_point_obs.python_prefix)+1:] - - @staticmethod - def is_python_script(arg_value): - return arg_value.startswith(met_point_obs.python_prefix) - - @staticmethod - def print_data(key, data_array, show_count=COUNT_SHOW): - if isinstance(data_array, list): - data_len = len(data_array) - if show_count >= data_len: - print(" {k:10s}: {v}".format(k=key, v= data_array)) - else: - end_offset = int(show_count/2) - print(" {k:10s}: count={v}".format(k=key, v=data_len)) - print(" {k:10s}[0:{o}] {v}".format(k=key, v=data_array[:end_offset], o=end_offset)) - print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v='...', s=end_offset+1, e=data_len-end_offset-1)) - print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v= data_array[-end_offset:], s=(data_len-end_offset), e=(data_len-1))) - else: - print(" {k:10s}: {v}".format(k=key, v= data_array)) - - @staticmethod - def print_point_data(met_point_data, print_subset=True): - print(' === MET point data by python embedding ===') - if print_subset: - met_point_obs.print_data('nhdr',met_point_data['nhdr']) - met_point_obs.print_data('nobs',met_point_data['nobs']) - 
met_point_obs.print_data('use_var_id',met_point_data['use_var_id']) - met_point_obs.print_data('hdr_typ',met_point_data['hdr_typ']) - met_point_obs.print_data('hdr_typ_table',met_point_data['hdr_typ_table']) - met_point_obs.print_data('hdr_sid',met_point_data['hdr_sid']) - met_point_obs.print_data('hdr_sid_table',met_point_data['hdr_sid_table']) - met_point_obs.print_data('hdr_vld',met_point_data['hdr_vld']) - met_point_obs.print_data('hdr_vld_table',met_point_data['hdr_vld_table']) - met_point_obs.print_data('hdr_lat',met_point_data['hdr_lat']) - met_point_obs.print_data('hdr_lon',met_point_data['hdr_lon']) - met_point_obs.print_data('hdr_elv',met_point_data['hdr_elv']) - met_point_obs.print_data('obs_hid',met_point_data['obs_hid']) - met_point_obs.print_data('obs_vid',met_point_data['obs_vid']) - met_point_obs.print_data('obs_var_table',met_point_data['obs_var_table']) - met_point_obs.print_data('obs_qty',met_point_data['obs_qty']) - met_point_obs.print_data('obs_qty_table',met_point_data['obs_qty_table']) - met_point_obs.print_data('obs_lvl',met_point_data['obs_lvl']) - met_point_obs.print_data('obs_hgt',met_point_data['obs_hgt']) - met_point_obs.print_data('obs_val',met_point_data['obs_val']) - else: - print('All',met_point_data) - print(" nhdr: ",met_point_data['nhdr']) - print(" nobs: ",met_point_data['nobs']) - print(' use_var_id: ',met_point_data['use_var_id']) - print(' hdr_typ: ',met_point_data['hdr_typ']) - print('hdr_typ_table: ',met_point_data['hdr_typ_table']) - print(' hdr_sid: ',met_point_data['hdr_sid']) - print('hdr_sid_table: ',met_point_data['hdr_sid_table']) - print(' hdr_vld: ',met_point_data['hdr_vld']) - print('hdr_vld_table: ',met_point_data['hdr_vld_table']) - print(' hdr_lat: ',met_point_data['hdr_lat']) - print(' hdr_lon: ',met_point_data['hdr_lon']) - print(' hdr_elv: ',met_point_data['hdr_elv']) - print(' obs_hid: ',met_point_data['obs_hid']) - print(' obs_vid: ',met_point_data['obs_vid']) - print('obs_var_table: ',met_point_data['obs_var_table']) - print(' obs_qty: ',met_point_data['obs_qty']) - print('obs_qty_table: ',met_point_data['obs_qty_table']) - print(' obs_lvl: ',met_point_data['obs_lvl']) - print(' obs_hgt: ',met_point_data['obs_hgt']) - print(' obs_val: ',met_point_data['obs_val']) - - print(' === MET point data by python embedding ===') - - -class csv_point_obs(ABC, base_met_point_obs): - - def __init__(self, point_data): - self.point_data = point_data - super(csv_point_obs, self).__init__() - - self.obs_cnt = obs_cnt = len(point_data) - self.obs_qty = [ 0 for _ in range(0, obs_cnt) ] # (nobs_qty) integer, index of self.obs_qty_table - self.obs_hid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer - self.obs_vid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer, veriable index from self.obs_var_table or GRIB code - self.obs_lvl = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float - self.obs_hgt = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float - self.obs_val = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float - - self.convert_point_data() - - def check_csv_record(self, csv_point_data, index): - error_msgs = [] - # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] - # dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'} - if 11 > len(csv_point_data): - error_msgs.append("{i}-th data: missing columns. 
should be 11 columns, not {c} columns".format( - i=index, c=len(csv_point_data))) - elif 11 < len(csv_point_data): - print("{i}-th data: ignore after 11-th columns out of {c} columns".format( - i=index, c=len(csv_point_data))) - if not isinstance(csv_point_data[0], str): - error_msgs.append("{i}-th data: message_type is not string".format(i=index)) - if not isinstance(csv_point_data[1], str): - error_msgs.append("{i}-th data: station_id is not string".format(i=index)) - if not isinstance(csv_point_data[2], str): - error_msgs.append("{i}-th data: valid_time is not string".format(i=index)) - if isinstance(csv_point_data[3], str): - error_msgs.append("{i}-th data: latitude can not be a string".format(i=index)) - elif csv_point_data[3] < -90.0 or csv_point_data[3] > 90.0: - error_msgs.append("{i}-th data: latitude ({l}) is out of range".format(i=index, l=csv_point_data[3])) - if isinstance(csv_point_data[4], str): - error_msgs.append("{i}-th data: longitude can not be a string".format(i=index)) - elif csv_point_data[4] < -180.0 or csv_point_data[4] > 360.0: - error_msgs.append("{i}-th data: longitude ({l}) is out of range".format(i=index, l=csv_point_data[4])) - if not isinstance(csv_point_data[6], str): - error_msgs.append("{i}-th data: grib_code/var_name is not string".format(i=index)) - if not isinstance(csv_point_data[9], str): - error_msgs.append("{i}-th data: quality_mark is not string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[5]) - if is_string and not is_num: - error_msgs.append("{i}-th data: elevation: only NA is accepted as string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[7]) - if is_string and not is_num: - error_msgs.append("{i}-th data: obs_level: only NA is accepted as string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[8]) - if is_string and not is_num: - error_msgs.append("{i}-th data: obs_height: only NA is accepted as string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[10]) - if is_string and not is_num: - error_msgs.append("{i}-th data: obs_value: only NA is accepted as string".format(i=index)) - return error_msgs - - def check_csv_point_data(self, all_records=False): - if 0 == len(self.point_data): - self.add_error_msg("No data!") - elif all_records: - data_idx = 0 - for csv_point_data in self.point_data: - data_idx += 1 - error_messages = self.check_csv_record(csv_point_data, data_idx) - if len(error_messages) > 0: - self.add_error_msgs(error_messages) - else: - error_messages = self.check_csv_record(self.point_data[0], index=1) - if len(error_messages) > 0: - self.add_error_msgs(error_messages) - if 1 < len(self.point_data): - error_messages = self.check_csv_record(self.point_data[-1], index=len(self.point_data)) - if len(error_messages) > 0: - self.add_error_msgs(error_messages) - - def convert_point_data(self): - hdr_cnt = hdr_typ_cnt = hdr_sid_cnt = hdr_vld_cnt = 0 - var_name_cnt = qc_cnt = 0 - - hdr_map = {} - hdr_typ_map = {} - hdr_sid_map = {} - hdr_vld_map = {} - obs_var_map = {} - obs_qty_map = {} - self.use_var_id = not self.is_grib_code() - - index = 0 - #names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] - for csv_point_record in self.point_data: - # Build header map. 
- hdr_typ_str = csv_point_record[0] - hdr_typ_idx = hdr_typ_map.get(hdr_typ_str,-1) - if hdr_typ_idx < 0: - hdr_typ_idx = hdr_typ_cnt - hdr_typ_map[hdr_typ_str] = hdr_typ_idx - hdr_typ_cnt += 1 - - hdr_sid_str = csv_point_record[1] - hdr_sid_idx = hdr_sid_map.get(hdr_sid_str,-1) - if hdr_sid_idx < 0: - hdr_sid_idx = hdr_sid_cnt - hdr_sid_map[hdr_sid_str] = hdr_sid_idx - hdr_sid_cnt += 1 - - hdr_vld_str = csv_point_record[2] - hdr_vld_idx = hdr_vld_map.get(hdr_vld_str,-1) - if hdr_vld_idx < 0: - hdr_vld_idx = hdr_vld_cnt - hdr_vld_map[hdr_vld_str] = hdr_vld_idx - hdr_vld_cnt += 1 - - lat = csv_point_record[3] - lon = csv_point_record[4] - elv = self.get_num_value(csv_point_record[5] ) - hdr_key = (hdr_typ_idx,hdr_sid_idx,hdr_vld_idx,lat,lon,elv) - hdr_idx = hdr_map.get(hdr_key,-1) - if hdr_idx < 0: - hdr_idx = hdr_cnt - hdr_map[hdr_key] = hdr_idx - hdr_cnt += 1 - - var_id_str = csv_point_record[6] - if self.use_var_id: - var_id = obs_var_map.get(var_id_str,-1) - if var_id < 0: - var_id = var_name_cnt - obs_var_map[var_id_str] = var_id - var_name_cnt += 1 - else: - var_id = int(var_id_str) - - qc_str = csv_point_record[9] - qc_id = obs_qty_map.get(qc_str,-1) - if qc_id < 0: - qc_id = qc_cnt - obs_qty_map[qc_str] = qc_id - qc_cnt += 1 - - # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] - self.obs_vid[index] = var_id - self.obs_hid[index] = hdr_idx - self.obs_lvl[index] = self.get_num_value(csv_point_record[7]) - self.obs_hgt[index] = self.get_num_value(csv_point_record[8]) - self.obs_val[index] = self.get_num_value(csv_point_record[10]) - self.obs_qty[index] = qc_id - - index += 1 - - self.nhdr = hdr_cnt - self.nhdr_typ = hdr_typ_cnt - self.nhdr_sid = hdr_sid_cnt - self.nhdr_vld = hdr_vld_cnt - self.nobs_var = var_name_cnt - self.nobs_qty = qc_cnt - - # Fill header array and table array based on the map - self.hdr_typ = [ 0 for _ in range(0, hdr_cnt) ] - self.hdr_sid = [ 0 for _ in range(0, hdr_cnt) ] - self.hdr_vld = [ 0 for _ in range(0, hdr_cnt) ] - self.hdr_lat = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] - self.hdr_lon = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] - self.hdr_elv = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] - for key, idx in hdr_map.items(): - self.hdr_typ[idx] = key[0] - self.hdr_sid[idx] = key[1] - self.hdr_vld[idx] = key[2] - self.hdr_lat[idx] = key[3] - self.hdr_lon[idx] = key[4] - self.hdr_elv[idx] = key[5] - - self.hdr_typ_table = [ "" for _ in range(0, hdr_typ_cnt) ] - self.hdr_sid_table = [ "" for _ in range(0, hdr_sid_cnt) ] - self.hdr_vld_table = [ "" for _ in range(0, hdr_vld_cnt) ] - self.obs_qty_table = [ "" for _ in range(0, qc_cnt) ] - self.obs_var_table = [ "" for _ in range(0, var_name_cnt) ] - for key, idx in hdr_typ_map.items(): - self.hdr_typ_table[idx] = key - for key, idx in hdr_sid_map.items(): - self.hdr_sid_table[idx] = key - for key, idx in hdr_vld_map.items(): - self.hdr_vld_table[idx] = key - for key, idx in obs_qty_map.items(): - self.obs_qty_table[idx] = key - for key, idx in obs_var_map.items(): - self.obs_var_table[idx] = key - - def get_num_value(self, column_value): - num_value = column_value - if isinstance(column_value, str): - if self.is_number(column_value): - num_value = float(column_value) - else: - num_value = self.FILL_VALUE - if column_value.lower() != 'na' and column_value.lower() != 'n/a': - self.log_info(f'{column_value} is not a number, converted to the missing value') - return num_value - - def is_grib_code(self): - grib_code = True - for _point_data in self.point_data: - if 
isinstance(_point_data[6], int): - continue - elif isinstance(_point_data[6], str) and not _point_data[6].isdecimal(): - grib_code = False - break; - return grib_code - - def is_num_string(self, column_value): - is_string = isinstance(column_value, str) - if is_string: - is_num = True if self.is_number(column_value) or column_value.lower() == 'na' or column_value.lower() == 'n/a' else False - else: - is_num = True - return is_string, is_num - - -class met_point_obs(ABC, base_met_point_obs): - - MET_ENV_RUN = 'MET_FORCE_TO_RUN' - - @abstractmethod - def read_data(self, args): - # args can be input_file_name, list, or dictionary - # - The variables at __init__ should be filled as python list or numpy array - # - set self.input_name - # - # Here is a template - ''' - if isinstance(args, dict): - in_filename = args.get('in_name',None) - elif isinstance(args, list): - in_filename = args[0] - else: - in_filename = args - self.input_name = in_filename - ''' - pass - - @staticmethod - def get_prompt(): - return get_prompt() - - @staticmethod - def is_python_prefix(user_cmd): - return user_cmd.startswith(base_met_point_obs.python_prefix) - - -# This is a sample drived class -class sample_met_point_obs(met_point_obs): - - #@abstractmethod - def read_data(self, arg_map={}): - self.hdr_typ = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) - self.hdr_sid = np.array([ 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3, 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3 ]) - self.hdr_vld = np.array([ 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3, 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3 ]) - self.hdr_lat = np.array([ 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46., 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46. ]) - self.hdr_lon = np.array([ -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92., -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92. ]) - self.hdr_elv = np.array([ 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220. ]) - - self.obs_hid = np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 25 ]) - self.obs_vid = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]) - self.obs_qty = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) - self.obs_lvl = np.array([ 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000. ]) - self.obs_hgt = np.array([ 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2. ]) - self.obs_val = np.array([ 292., 292.5, 293., 293.5, 294., 294.5, 295., 295.5, 296., 292., 293.4, 293., 296., 294., 92., 92.5, 93., 93.5, 94., 94.5, 95., 95.5, 96., 92., 93.4, 93., 96., 94. 
]) - - self.hdr_typ_table = [ "ADPSFC" ] - self.hdr_sid_table = [ "001", "002", "003", "004" ] - self.hdr_vld_table = [ - "20120409_115000", "20120409_115500", "20120409_120100", "20120409_120500", "20120409_121000", - "20120409_120000" ] - self.obs_var_table = [ "TMP", "RH" ] - self.obs_qty_table = [ "NA" ] - -def convert_point_data(point_data, check_all_records=False, input_type='csv'): - tmp_point_data = {} - if 'csv' == input_type: - csv_point_data = csv_point_obs(point_data) - csv_point_data.check_csv_point_data(check_all_records) - tmp_point_data = csv_point_data.get_point_data() - else: - base_met_point_obs.error_msg('Not supported input type: {input_type}') - return tmp_point_data - -def main(): - args = {} # or args = [] - point_obs_data = sample_met_point_obs() - point_obs_data.read_data(args) - met_point_data = point_obs_data.get_point_data() - - point_obs_data.print_point_data(met_point_data, print_subset=False) - -if __name__ == '__main__': - main() - print('Done python scripot') diff --git a/scripts/python/met_point_obs_nc.py b/scripts/python/met_point_obs_nc.py deleted file mode 100644 index e6680c0689..0000000000 --- a/scripts/python/met_point_obs_nc.py +++ /dev/null @@ -1,281 +0,0 @@ -#!/usr/bin/env python3 - -''' -Separated from read_met_point_obs on Feb 09, 2023 - -@author: hsoh - -This script reads the MET point observation NetCDF file like MET tools do. -''' - -import os -import sys -from datetime import datetime -import numpy as np -import netCDF4 as nc - -from met_point_obs import met_point_obs, base_met_point_obs, get_prompt - -DO_PRINT_DATA = False -ARG_PRINT_DATA = 'show_data' - -# Note: caller should import netCDF4 -# the argements nc_group(dataset) and nc_var should not be None -class nc_tools(): - - met_missing = -99999999. 
- - @staticmethod - def get_num_array(nc_group, var_name): - nc_var = nc_group.variables.get(var_name, None) - return [] if nc_var is None else nc_var[:] - - @staticmethod - def get_ncbyte_array_to_str(nc_var): - nc_str_data = nc_var[:] - if nc_var.datatype.name == 'bytes8': - nc_str_data = [ str(s.compressed(),"utf-8") for s in nc_var[:] ] - return nc_str_data - - @staticmethod - def get_string_array(nc_group, var_name): - nc_var = nc_group.variables.get(var_name, None) - return [] if nc_var is None else nc_tools.get_ncbyte_array_to_str(nc_var) - - -class nc_point_obs(met_point_obs): - - # args should be string, list, or dictionary - def get_nc_filename(self, args): - nc_filename = None - if isinstance(args, dict): - nc_filename = args.get('nc_name',None) - elif isinstance(args, list): - nc_filename = args[0] - elif args != ARG_PRINT_DATA: - nc_filename = args - - return nc_filename - - def read_data(self, nc_filename): - if nc_filename is None: - self.log_error_msg("The input NetCDF filename is missing") - elif not os.path.exists(nc_filename): - self.log_error_msg(f"input NetCDF file ({nc_filename}) does not exist") - else: - dataset = nc.Dataset(nc_filename, 'r') - - attr_name = 'use_var_id' - use_var_id_str = dataset.getncattr(attr_name) if attr_name in dataset.ncattrs() else "false" - self.use_var_id = use_var_id_str.lower() == 'true' - - # Header - self.hdr_typ = dataset['hdr_typ'][:] - self.hdr_sid = dataset['hdr_sid'][:] - self.hdr_vld = dataset['hdr_vld'][:] - self.hdr_lat = dataset['hdr_lat'][:] - self.hdr_lon = dataset['hdr_lon'][:] - self.hdr_elv = dataset['hdr_elv'][:] - self.hdr_typ_table = nc_tools.get_string_array(dataset, 'hdr_typ_table') - self.hdr_sid_table = nc_tools.get_string_array(dataset, 'hdr_sid_table') - self.hdr_vld_table = nc_tools.get_string_array(dataset, 'hdr_vld_table') - - nc_var = dataset.variables.get('obs_unit', None) - if nc_var: - self.obs_var_unit = nc_var[:] - nc_var = dataset.variables.get('obs_desc', None) - if nc_var: - self.obs_var_desc = nc_var[:] - - nc_var = dataset.variables.get('hdr_prpt_typ', None) - if nc_var: - self.hdr_prpt_typ = nc_var[:] - nc_var = dataset.variables.get('hdr_irpt_typ', None) - if nc_var: - self.hdr_irpt_typ = nc_var[:] - nc_var = dataset.variables.get('hdr_inst_typ', None) - if nc_var: - self.hdr_inst_typ =nc_var[:] - - #Observation data - self.hdr_sid = dataset['hdr_sid'][:] - self.obs_qty = np.array(dataset['obs_qty'][:]) - self.obs_hid = np.array(dataset['obs_hid'][:]) - self.obs_lvl = np.array(dataset['obs_lvl'][:]) - self.obs_hgt = np.array(dataset['obs_hgt'][:]) - self.obs_val = np.array(dataset['obs_val'][:]) - nc_var = dataset.variables.get('obs_vid', None) - if nc_var is None: - self.use_var_id = False - nc_var = dataset.variables.get('obs_gc', None) - else: - self.obs_var_table = nc_tools.get_string_array(dataset, 'obs_var') - if nc_var: - self.obs_vid = np.array(nc_var[:]) - - self.obs_qty_table = nc_tools.get_string_array(dataset, 'obs_qty_table') - - def save_ncfile(self, nc_filename): - met_data = self.get_point_data() - with nc.Dataset(nc_filename, 'w') as nc_dataset: - self.set_nc_data(nc_dataset) - return met_data - - def set_nc_data(self, nc_dataset): - return nc_point_obs.write_nc_data(nc_dataset, self) - - @staticmethod - def write_nc_file(nc_filename, point_obs): - with nc.Dataset(nc_filename, 'w') as nc_dataset: - nc_point_obs.set_nc_data(nc_dataset, point_obs) - - @staticmethod - def write_nc_data(nc_dataset, point_obs): - do_nothing = False - if 0 == point_obs.nhdr: - do_nothing = True - 
base_met_point_obs.info_msg("the header is empty") - if 0 == point_obs.nobs: - do_nothing = True - base_met_point_obs.info_msg("the observation data is empty") - if do_nothing: - print() - return - - # Set global attributes - nc_dataset.MET_Obs_version = "1.02" ; - nc_dataset.use_var_id = "true" if point_obs.use_var_id else "false" - - # Create dimensions - nc_dataset.createDimension('mxstr', 16) - nc_dataset.createDimension('mxstr2', 40) - nc_dataset.createDimension('mxstr3', 80) - nc_dataset.createDimension('nhdr', point_obs.nhdr) - nc_dataset.createDimension('nobs', point_obs.nobs) - #npbhdr = len(point_obs.hdr_prpt_typ) - if 0 < point_obs.npbhdr: - nc_dataset.createDimension('npbhdr', point_obs.npbhdr) - nc_dataset.createDimension('nhdr_typ', point_obs.nhdr_typ) - nc_dataset.createDimension('nhdr_sid', point_obs.nhdr_sid) - nc_dataset.createDimension('nhdr_vld', point_obs.nhdr_vld) - nc_dataset.createDimension('nobs_qty', point_obs.nobs_qty) - nc_dataset.createDimension('obs_var_num', point_obs.nobs_var) - - type_for_string = 'S1' # np.byte - dims_hdr = ('nhdr',) - dims_obs = ('nobs',) - - # Create header and observation variables - var_hdr_typ = nc_dataset.createVariable('hdr_typ', np.int32, dims_hdr, fill_value=-9999) - var_hdr_sid = nc_dataset.createVariable('hdr_sid', np.int32, dims_hdr, fill_value=-9999) - var_hdr_vld = nc_dataset.createVariable('hdr_vld', np.int32, dims_hdr, fill_value=-9999) - var_hdr_lat = nc_dataset.createVariable('hdr_lat', np.float32, dims_hdr, fill_value=-9999.) - var_hdr_lon = nc_dataset.createVariable('hdr_lon', np.float32, dims_hdr, fill_value=-9999.) - var_hdr_elv = nc_dataset.createVariable('hdr_elv', np.float32, dims_hdr, fill_value=-9999.) - - var_obs_qty = nc_dataset.createVariable('obs_qty', np.int32, dims_obs, fill_value=-9999) - var_obs_hid = nc_dataset.createVariable('obs_hid', np.int32, dims_obs, fill_value=-9999) - var_obs_vid = nc_dataset.createVariable('obs_vid', np.int32, dims_obs, fill_value=-9999) - var_obs_lvl = nc_dataset.createVariable('obs_lvl', np.float32, dims_obs, fill_value=-9999.) - var_obs_hgt = nc_dataset.createVariable('obs_hgt', np.float32, dims_obs, fill_value=-9999.) - var_obs_val = nc_dataset.createVariable('obs_val', np.float32, dims_obs, fill_value=-9999.) - - if 0 == point_obs.npbhdr: - var_hdr_prpt_typ = None - var_hdr_irpt_typ = None - var_hdr_inst_typ = None - else: - dims_npbhdr = ('npbhdr',) - var_hdr_prpt_typ = nc_dataset.createVariable('hdr_prpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) - var_hdr_irpt_typ = nc_dataset.createVariable('hdr_irpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) - var_hdr_inst_typ = nc_dataset.createVariable('hdr_inst_typ', np.int32, dims_npbhdr, fill_value=-9999.) 
- - var_hdr_typ_table = nc_dataset.createVariable('hdr_typ_table', type_for_string, ('nhdr_typ','mxstr2')) - var_hdr_sid_table = nc_dataset.createVariable('hdr_sid_table', type_for_string, ('nhdr_sid','mxstr2')) - var_hdr_vld_table = nc_dataset.createVariable('hdr_vld_table', type_for_string, ('nhdr_vld','mxstr')) - var_obs_qty_table = nc_dataset.createVariable('obs_qty_table', type_for_string, ('nobs_qty','mxstr')) - var_obs_var_table = nc_dataset.createVariable('obs_var', type_for_string, ('obs_var_num','mxstr2')) - var_obs_var_unit = nc_dataset.createVariable('obs_unit', type_for_string, ('obs_var_num','mxstr2')) - var_obs_var_desc = nc_dataset.createVariable('obs_desc', type_for_string, ('obs_var_num','mxstr3')) - - # Set variables - var_hdr_typ[:] = point_obs.hdr_typ[:] - var_hdr_sid[:] = point_obs.hdr_sid[:] - var_hdr_vld[:] = point_obs.hdr_vld[:] - var_hdr_lat[:] = point_obs.hdr_lat[:] - var_hdr_lon[:] = point_obs.hdr_lon[:] - var_hdr_elv[:] = point_obs.hdr_elv[:] - for i in range(0, point_obs.nhdr_typ): - for j in range(0, len(point_obs.hdr_typ_table[i])): - var_hdr_typ_table[i,j] = point_obs.hdr_typ_table[i][j] - for i in range(0, point_obs.nhdr_sid): - for j in range(0, len(point_obs.hdr_sid_table[i])): - var_hdr_sid_table[i,j] = point_obs.hdr_sid_table[i][j] - for i in range(0, point_obs.nhdr_vld): - for j in range(0, len(point_obs.hdr_vld_table[i])): - var_hdr_vld_table[i,j] = point_obs.hdr_vld_table[i][j] - if 0 < point_obs.npbhdr: - var_hdr_prpt_typ[:] = point_obs.hdr_prpt_typ[:] - var_hdr_irpt_typ[:] = point_obs.hdr_irpt_typ[:] - var_hdr_inst_typ[:] = point_obs.hdr_inst_typ[:] - - var_obs_qty[:] = point_obs.obs_qty[:] - var_obs_hid[:] = point_obs.obs_hid[:] - var_obs_vid[:] = point_obs.obs_vid[:] - var_obs_lvl[:] = point_obs.obs_lvl[:] - var_obs_hgt[:] = point_obs.obs_hgt[:] - var_obs_val[:] = point_obs.obs_val[:] - for i in range(0, point_obs.nobs_var): - for j in range(0, len(point_obs.obs_var_table[i])): - var_obs_var_table[i,j] = point_obs.obs_var_table[i][j] - var_obs_var_unit[i] = "" if i >= len(point_obs.obs_var_unit) else point_obs.obs_var_unit[i] - var_obs_var_desc[i] = "" if i >= len(point_obs.obs_var_desc) else point_obs.obs_var_desc[i] - for i in range(0, point_obs.nobs_qty): - for j in range(0, len(point_obs.obs_qty_table[i])): - var_obs_qty_table[i,j] = point_obs.obs_qty_table[i][j] - - # Set variable attributes - var_hdr_typ.long_name = "index of message type" - var_hdr_sid.long_name = "index of station identification" - var_hdr_vld.long_name = "index of valid time" - var_hdr_lat.long_name = "latitude" - var_hdr_lat.units = "degrees_north" - var_hdr_lon.long_name = "longitude" - var_hdr_lon.units = "degrees_east" - var_hdr_elv.long_name = "elevation" - var_hdr_elv.units = "meters above sea level (msl)" - - var_obs_qty.long_name = "index of quality flag" - var_obs_hid.long_name = "index of matching header data" - var_obs_vid.long_name = "index of BUFR variable corresponding to the observation type" - var_obs_lvl.long_name = "pressure level (hPa) or accumulation interval (sec)" - var_obs_hgt.long_name = "height in meters above sea level (msl)" - var_obs_val.long_name = "observation value" - var_hdr_typ_table.long_name = "message type" - var_hdr_sid_table.long_name = "station identification" - var_hdr_vld_table.long_name = "valid time" - var_hdr_vld_table.units = "YYYYMMDD_HHMMSS UTC" - var_obs_qty_table.long_name = "quality flag" - var_obs_var_table.long_name = "variable names" - var_obs_var_unit.long_name = "variable units" - var_obs_var_desc.long_name = 
"variable descriptions" - - -def main(argv): - if len(argv) != 1 and argv[1] != ARG_PRINT_DATA: - netcdf_filename = argv[1] - tmp_nc_name = 'tmp_met_point.nc' - point_obs_data = nc_point_obs() - point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename)) - met_point_data = point_obs_data.save_ncfile(tmp_nc_name) - print(f'{get_prompt()} saved met_point_data to {tmp_nc_name}') - met_point_data['met_point_data'] = point_obs_data - - if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]: - met_point_obs.print_point_data(met_point_data) - -if __name__ == '__main__': - start_time = datetime.now() - main(sys.argv) - run_time = datetime.now() - start_time - print(f'{get_prompt()} Done python script {sys.argv[0]} took {run_time}') diff --git a/data/wrappers/Makefile.am b/scripts/python/pyembed/Makefile.am similarity index 90% rename from data/wrappers/Makefile.am rename to scripts/python/pyembed/Makefile.am index deb919438e..ca8a3cb66e 100644 --- a/data/wrappers/Makefile.am +++ b/scripts/python/pyembed/Makefile.am @@ -18,18 +18,19 @@ SUBDIRS = -wrappersdir = $(pkgdatadir)/wrappers +pyembeddir = $(pkgdatadir)/python/pyembed -wrappers_DATA = \ - set_python_env.py \ +pyembed_DATA = \ + python_embedding.py \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ + set_python_env.py \ write_tmp_dataplane.py \ write_tmp_point.py \ write_tmp_point_nc.py \ write_tmp_mpr.py -EXTRA_DIST = ${wrappers_DATA} +EXTRA_DIST = ${pyembed_DATA} MAINTAINERCLEANFILES = Makefile.in diff --git a/data/wrappers/Makefile.in b/scripts/python/pyembed/Makefile.in similarity index 95% rename from data/wrappers/Makefile.in rename to scripts/python/pyembed/Makefile.in index da04b2b2a0..bd0848e94e 100644 --- a/data/wrappers/Makefile.in +++ b/scripts/python/pyembed/Makefile.in @@ -88,7 +88,7 @@ PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ -subdir = data/wrappers +subdir = scripts/python/pyembed ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ @@ -152,8 +152,8 @@ am__uninstall_files_from_dir = { \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } -am__installdirs = "$(DESTDIR)$(wrappersdir)" -DATA = $(wrappers_DATA) +am__installdirs = "$(DESTDIR)$(pyembeddir)" +DATA = $(pyembed_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ @@ -355,18 +355,19 @@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ SUBDIRS = -wrappersdir = $(pkgdatadir)/wrappers -wrappers_DATA = \ - set_python_env.py \ +pyembeddir = $(pkgdatadir)/python/pyembed +pyembed_DATA = \ + python_embedding.py \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ + set_python_env.py \ write_tmp_dataplane.py \ write_tmp_point.py \ write_tmp_point_nc.py \ write_tmp_mpr.py -EXTRA_DIST = ${wrappers_DATA} +EXTRA_DIST = ${pyembed_DATA} MAINTAINERCLEANFILES = Makefile.in all: all-recursive @@ -380,9 +381,9 @@ $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign data/wrappers/Makefile'; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/pyembed/Makefile'; \ $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign data/wrappers/Makefile + $(AUTOMAKE) --foreign scripts/python/pyembed/Makefile Makefile: $(srcdir)/Makefile.in 
$(top_builddir)/config.status @case '$?' in \ *config.status*) \ @@ -400,27 +401,27 @@ $(top_srcdir)/configure: $(am__configure_deps) $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): -install-wrappersDATA: $(wrappers_DATA) +install-pyembedDATA: $(pyembed_DATA) @$(NORMAL_INSTALL) - @list='$(wrappers_DATA)'; test -n "$(wrappersdir)" || list=; \ + @list='$(pyembed_DATA)'; test -n "$(pyembeddir)" || list=; \ if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(wrappersdir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(wrappersdir)" || exit 1; \ + echo " $(MKDIR_P) '$(DESTDIR)$(pyembeddir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pyembeddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ - echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(wrappersdir)'"; \ - $(INSTALL_DATA) $$files "$(DESTDIR)$(wrappersdir)" || exit $$?; \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pyembeddir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pyembeddir)" || exit $$?; \ done -uninstall-wrappersDATA: +uninstall-pyembedDATA: @$(NORMAL_UNINSTALL) - @list='$(wrappers_DATA)'; test -n "$(wrappersdir)" || list=; \ + @list='$(pyembed_DATA)'; test -n "$(pyembeddir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ - dir='$(DESTDIR)$(wrappersdir)'; $(am__uninstall_files_from_dir) + dir='$(DESTDIR)$(pyembeddir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. @@ -584,7 +585,7 @@ check: check-recursive all-am: Makefile $(DATA) installdirs: installdirs-recursive installdirs-am: - for dir in "$(DESTDIR)$(wrappersdir)"; do \ + for dir in "$(DESTDIR)$(pyembeddir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive @@ -638,7 +639,7 @@ info: info-recursive info-am: -install-data-am: install-wrappersDATA +install-data-am: install-pyembedDATA install-dvi: install-dvi-recursive @@ -682,7 +683,7 @@ ps: ps-recursive ps-am: -uninstall-am: uninstall-wrappersDATA +uninstall-am: uninstall-pyembedDATA .MAKE: $(am__recursive_targets) install-am install-strip @@ -693,11 +694,11 @@ uninstall-am: uninstall-wrappersDATA install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ - install-ps install-ps-am install-strip install-wrappersDATA \ + install-ps install-ps-am install-pyembedDATA install-strip \ installcheck installcheck-am installdirs installdirs-am \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic pdf pdf-am ps ps-am tags tags-am uninstall \ - uninstall-am uninstall-wrappersDATA + uninstall-am uninstall-pyembedDATA .PRECIOUS: Makefile diff --git a/scripts/python/pyembed/python_embedding.py b/scripts/python/pyembed/python_embedding.py new file mode 100644 index 0000000000..434679b185 --- /dev/null +++ b/scripts/python/pyembed/python_embedding.py @@ -0,0 +1,113 @@ + +######################################################################## +# +# Common APIs for python wrappers by Howard Soh (from scripts by +# George McCabe and Randy Bullock). +# +# This is called when an user specifies python executable (MET_PYTHON_EXE). +# The target python object is saved as a temporary file by user defined python. 
+# And the python binaries (compiled with MET) read the temporary file and +# build the python object for MET. +# The temporary file can be any form with matching write/read scripts. +# - NetCDF for gridded data and point observation data. +# - text file (ASCII data) (MPR, point observation). +# +# NOTE: sys.argv is changed by calling call_python +# +######################################################################## + +import os +import sys +from importlib import util as import_util + +class pyembed_tools(): + + debug = False + class_name = "pyembed_tools" + + @staticmethod + def add_python_path(called_file): # called_file = __file__ + method_name = f"{pyembed_tools.class_name}.add_python_path()" + script_dir = os.path.abspath(os.path.dirname(called_file)) + if os.path.exists(script_dir) and script_dir != os.curdir: + if pyembed_tools.debug: + print(f"{method_name} added python path {script_dir}") + sys.path.append(os.path.abspath(script_dir)) + + # testing purpose (to switch the python path by using MET_BASE) + met_base_dir = os.environ.get('MET_BASE', None) + if met_base_dir is not None: + met_python_path = os.path.join(met_base_dir, 'python') + if pyembed_tools.debug: + print(f"{method_name} added python path {os.path.abspath(met_python_path)} from MET_BASE") + sys.path.append(os.path.abspath(met_python_path)) + + # add share/met/python directory to system path + met_python_path = os.path.join(script_dir, os.pardir, 'python') + if not os.path.exists(met_python_path): + met_python_path = os.path.join(script_dir, os.pardir, os.pardir, 'python') + if os.path.exists(met_python_path) and met_python_path != met_base_dir: + if pyembed_tools.debug: + print(f"{method_name} added python path {os.path.abspath(met_python_path)}") + sys.path.append(os.path.abspath(met_python_path)) + else: + print(" - {d} does not exist".format(d=met_python_path)) + + @staticmethod + def call_python(argv): + print("Python Script:\t" + repr(argv[0])) + print("User Command:\t" + repr(' '.join(argv[2:]))) + print("Temporary File:\t" + repr(argv[1])) + + # argv[0] is the python wrapper script (caller) + # argv[1] contains the temporary filename + # argv[2] contains the user defined python script + pyembed_module_name = argv[2] + sys.argv = argv[2:] + + # add share/met/python directory to system path to find met_point_obs + pyembed_tools.add_python_path(pyembed_module_name) + + # append user script dir to system path + pyembed_dir, _ = os.path.split(pyembed_module_name) + if pyembed_dir: + sys.path.insert(0, pyembed_dir) + + if not pyembed_module_name.endswith('.py'): + pyembed_module_name += '.py' + + user_base = os.path.basename(pyembed_module_name).replace('.py','') + + spec = import_util.spec_from_file_location(user_base, pyembed_module_name) + met_in = import_util.module_from_spec(spec) + spec.loader.exec_module(met_in) + return met_in + + @staticmethod + def read_tmp_ascii(filename): + """ + Arguments: + filename (string): temporary file created by write_tmp_point.py or write_tmp_mpr.py + + Returns: + (list of lists): point or mpr data + """ + f = open(filename, 'r') + lines = f.readlines() + f.close() + + ascii_data = [eval(line.strip('\n')) for line in lines] + + return ascii_data + + @staticmethod + def write_tmp_ascii(filename, met_data): + with open(filename, 'w') as f: + for line in met_data: + f.write(str(line) + '\n') + + +if __name__ == '__main__': + argv_org = sys.argv[:] # save original sys.argv + met_in = pyembed_tools.call_python(sys.argv) + sys.argv[:] = argv_org[:] # restore
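The handoff above has two stages: MET launches the user's interpreter on one of the write_tmp_* wrappers, and pyembed_tools.call_python() imports and executes the user's script before the wrapper serializes the result to the temporary file named in argv[1]. Below is a minimal sketch of that import sequence; load_user_module is a hypothetical name used only for illustration, not part of this patch.

```python
# Minimal sketch of the import mechanism pyembed_tools.call_python() relies on.
# MET invokes a wrapper as:  python wrapper.py tmp_file user_script.py [args...]
# so argv[1] names the temporary file and argv[2:] become the user script's argv.
import os
import sys
from importlib import util as import_util

def load_user_module(script_path, script_args):
    # The user script runs as if it were the main program with its own argv.
    sys.argv = [script_path] + list(script_args)
    name = os.path.basename(script_path).replace('.py', '')
    spec = import_util.spec_from_file_location(name, script_path)
    module = import_util.module_from_spec(spec)
    spec.loader.exec_module(module)   # executes the script's top-level code
    return module                     # wrappers then read module.point_data, etc.
```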
diff --git a/data/wrappers/read_tmp_ascii.py b/scripts/python/pyembed/read_tmp_ascii.py similarity index 52% rename from data/wrappers/read_tmp_ascii.py rename to scripts/python/pyembed/read_tmp_ascii.py index fb7eb7b4e7..1e9573171c 100644 --- a/data/wrappers/read_tmp_ascii.py +++ b/scripts/python/pyembed/read_tmp_ascii.py @@ -20,29 +20,22 @@ import argparse +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools + def read_tmp_ascii(filename): - """ - Arguments: - filename (string): temporary file created by write_tmp_point.py or write_tmp_mpr.py - - Returns: - (list of lists): point or mpr data - """ - f = open(filename, 'r') - lines = f.readlines() - f.close() - - global ascii_data - ascii_data = [eval(line.strip('\n')) for line in lines] - - return ascii_data + global ascii_data # defined in python_handler.cc (tmp_list_name) + ascii_data = pyembed_tools.read_tmp_ascii(filename) + return ascii_data if __name__ == '__main__': - """ - Parse command line arguments - """ - parser = argparse.ArgumentParser() - parser.add_argument('--filename', type=str) - args = parser.parse_args() - - data = read_tmp_ascii(args.filename) + """ + Parse command line arguments + """ + parser = argparse.ArgumentParser() + parser.add_argument('--filename', type=str) + args = parser.parse_args() + + data = read_tmp_ascii(args.filename) diff --git a/scripts/python/pyembed/read_tmp_dataplane.py b/scripts/python/pyembed/read_tmp_dataplane.py new file mode 100644 index 0000000000..aa2bc6046a --- /dev/null +++ b/scripts/python/pyembed/read_tmp_dataplane.py @@ -0,0 +1,16 @@ +######################################################################## +# +# Reads temporary file into memory. +# +# usage: /path/to/python read_tmp_dataplane.py dataplane.tmp +# +######################################################################## + +import sys + +# PYTHON path for met.dataplane is added by write_tmp_dataplane.py +from met.dataplane import dataplane + +netcdf_filename = sys.argv[1] +# read NetCDF file +met_info = dataplane.read_dataplane(netcdf_filename)
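The dataplane round trip is symmetric: write_tmp_dataplane.py (further below) runs the user's script under MET_PYTHON_EXE and saves its data plane to a temporary NetCDF file, which read_tmp_dataplane.py reloads as met_info for the MET binary. A user dataplane script is expected to expose the two module-level names sketched here; the values are illustrative, and the attrs keys mirror the read_ascii_numpy.py example removed later in this patch.

```python
# Sketch of the module-level contract for a user dataplane script:
# met_data holds the 2D grid of values, attrs holds its metadata.
import numpy as np

met_data = np.zeros((129, 185))      # ny x nx data plane
attrs = {
    'valid': '20050807_120000',      # valid time (YYYYMMDD_HHMMSS)
    'init':  '20050807_000000',      # initialization time
    'lead':  '120000',               # lead time (HHMMSS)
    'accum': '120000',               # accumulation interval (HHMMSS)
    'name':  'FOO',                  # variable name
    'long_name': 'FOO_word',
    'level': 'Surface',
    'units': 'None',
    'grid':  {},                     # grid dictionary, as in read_ascii_numpy.py below
}
```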
diff --git a/scripts/python/pyembed/read_tmp_point_nc.py b/scripts/python/pyembed/read_tmp_point_nc.py new file mode 100644 index 0000000000..622405c520 --- /dev/null +++ b/scripts/python/pyembed/read_tmp_point_nc.py @@ -0,0 +1,28 @@ +######################################################################## +# +# Reads temporary point obs. file into memory. +# +# usage: /path/to/python read_tmp_point_nc.py tmp_output_filename +# +######################################################################## + +import sys + +from met.point import met_point_tools +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools + +input_filename = sys.argv[1] + +# read NetCDF file +print('{p} reading {f}'.format(p=met_point_tools.get_prompt(), f=input_filename)) +try: + point_obs_data = met_point_tools.get_nc_point_obs() + point_obs_data.read_data(input_filename) + + met_point_data = point_obs_data.get_point_data() + met_point_data['met_point_data'] = point_obs_data +except: + point_data = pyembed_tools.read_tmp_ascii(input_filename) diff --git a/data/wrappers/set_python_env.py b/scripts/python/pyembed/set_python_env.py similarity index 100% rename from data/wrappers/set_python_env.py rename to scripts/python/pyembed/set_python_env.py diff --git a/scripts/python/pyembed/write_tmp_dataplane.py b/scripts/python/pyembed/write_tmp_dataplane.py new file mode 100644 index 0000000000..991ca0c0fd --- /dev/null +++ b/scripts/python/pyembed/write_tmp_dataplane.py @@ -0,0 +1,27 @@ +######################################################################## +# +# Adapted from a script provided by George McCabe +# Adapted by Randy Bullock +# +# usage: /path/to/python write_tmp_dataplane.py \ +# tmp_output_filename <user_python_script>.py +# +######################################################################## + +import sys + +try: + from python_embedding import pyembed_tools + pyembed_tools.add_python_path(__file__) +except: + from pyembed.python_embedding import pyembed_tools + +from met.dataplane import dataplane + +#def write_dataplane(met_in, netcdf_filename): +# dataplane.write_dataplane(met_in, netcdf_filename) + +if __name__ == '__main__': + netcdf_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + dataplane.write_dataplane(met_in, netcdf_filename) diff --git a/scripts/python/pyembed/write_tmp_mpr.py b/scripts/python/pyembed/write_tmp_mpr.py new file mode 100644 index 0000000000..0e6141b76c --- /dev/null +++ b/scripts/python/pyembed/write_tmp_mpr.py @@ -0,0 +1,22 @@ +######################################################################## +# +# Adapted from a script provided by George McCabe +# Adapted by Randy Bullock +# +# usage: /path/to/python write_tmp_mpr.py \ +# tmp_output_filename <user_python_script>.py +# +######################################################################## + +import sys +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools + +if __name__ == '__main__': + argv_org = sys.argv[:] + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.mpr_data) diff --git a/scripts/python/pyembed/write_tmp_point.py b/scripts/python/pyembed/write_tmp_point.py new file mode 100644 index 0000000000..95f2992094 --- /dev/null +++ b/scripts/python/pyembed/write_tmp_point.py @@ -0,0 +1,21 @@ +######################################################################## +# +# Adapted from a script provided by George McCabe +# Adapted by Randy Bullock +# +# usage: /path/to/python write_tmp_point.py \ +# tmp_output_filename <user_python_script>.py +# +######################################################################## + +import sys + +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools + +if __name__ == '__main__': + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data)
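write_tmp_point.py expects the user module to define point_data: a list of 11-column observation records, the same layout documented in the read_ascii_point.py script that this patch deletes below. A minimal illustrative record (the values here are made up):

```python
# point_data: list of 11-element records consumed by write_tmp_point.py.
# Columns: Message_Type, Station_ID, Valid_Time(YYYYMMDD_HHMMSS),
#          Lat(deg N), Lon(deg E), Elevation(msl), Var_Name (or GRIB code),
#          Level, Height(msl or agl), QC_String, Observation_Value
point_data = [
    ['ADPSFC', 'ABC123', '20050807_120000',
     39.0, -105.0, 1650.0,
     'TMP', 1013.25, 2.0,
     'NA', 28.5],
]
```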
diff --git a/scripts/python/pyembed/write_tmp_point_nc.py b/scripts/python/pyembed/write_tmp_point_nc.py new file mode 100644 index 0000000000..6d6c69f693 --- /dev/null +++ b/scripts/python/pyembed/write_tmp_point_nc.py @@ -0,0 +1,38 @@ +######################################################################## +# +# Adapted from a script provided by George McCabe +# Adapted by Howard Soh +# +# usage: /path/to/python write_tmp_point_nc.py \ +# tmp_output_filename <user_python_script>.py +# +######################################################################## + +import os +import sys + +try: + from python_embedding import pyembed_tools + pyembed_tools.add_python_path(__file__) +except: + from pyembed.python_embedding import pyembed_tools + + +from met.point import met_point_tools + +if __name__ == '__main__': + argv_org = sys.argv[:] + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + + if hasattr(met_in, 'point_data'): + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data) + elif hasattr(met_in, 'point_obs_data'): + met_in.point_obs_data.save_ncfile(tmp_filename) + else: + if hasattr(met_in.met_point_data, 'point_obs_data'): + met_in.met_point_data['point_obs_data'].save_ncfile(tmp_filename) + else: + tmp_point_obs = met_point_tools.get_nc_point_obs() + tmp_point_obs.put_data(met_in.met_point_data) + tmp_point_obs.save_ncfile(tmp_filename) diff --git a/scripts/python/read_ascii_mpr.py b/scripts/python/read_ascii_mpr.py deleted file mode 100755 index fa71b8e6d2..0000000000 --- a/scripts/python/read_ascii_mpr.py +++ /dev/null @@ -1,33 +0,0 @@ -import pandas as pd -import os -import sys - -######################################################################## - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 2: - print("ERROR: read_ascii_point.py -> Must specify exactly one input file.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -try: - print("Input File:\t" + repr(input_file)) - - # Read MPR lines, skipping the header row and first column.
- mpr_data = pd.read_csv(input_file, header=None, - delim_whitespace=True, keep_default_na=False, - skiprows=1, usecols=range(1,37), - dtype=str).values.tolist() - print("Data Length:\t" + repr(len(mpr_data))) - print("Data Type:\t" + repr(type(mpr_data))) -except NameError: - print("Can't find the input file") - -######################################################################## diff --git a/scripts/python/read_ascii_numpy.py b/scripts/python/read_ascii_numpy.py deleted file mode 100755 index 6d129afc1c..0000000000 --- a/scripts/python/read_ascii_numpy.py +++ /dev/null @@ -1,75 +0,0 @@ -import numpy as np -import os -import sys - -########################################### - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 3: - print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -data_name = sys.argv[2] -try: - # Print some output to verify that this script ran - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) -except NameError: - print("Can't find the input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - - 'name': 'FooGrid', - - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - - 'lat_pin': 12.19, - 'lon_pin': -135.459, - - 'x_pin': 0.0, - 'y_pin': 0.0, - - 'lon_orient': -95.0, - - 'd_km': 40.635, - 'r_km': 6371.2, - - 'nx': 185, - 'ny': 129, - } - -} - -print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/read_ascii_numpy_grid.py b/scripts/python/read_ascii_numpy_grid.py deleted file mode 100755 index 3e4cc25f69..0000000000 --- a/scripts/python/read_ascii_numpy_grid.py +++ /dev/null @@ -1,51 +0,0 @@ -import numpy as np -import os -import sys - -########################################### - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 3: - print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -data_name = sys.argv[2] -try: - # Print some output to verify that this script ran - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) -except NameError: - print("Can't find the input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - 'grid': os.path.expandvars(os.getenv('PYTHON_GRID')) -} - -print("Attributes:\t" + 
repr(attrs)) diff --git a/scripts/python/read_ascii_point.py b/scripts/python/read_ascii_point.py deleted file mode 100755 index 7fb8eb076a..0000000000 --- a/scripts/python/read_ascii_point.py +++ /dev/null @@ -1,48 +0,0 @@ -import pandas as pd -import os -import sys -from met_point_obs import convert_point_data - -######################################################################## - -print("Python Script:\t" + repr(sys.argv[0])) - -## -## input file specified on the command line -## load the data into the numpy array -## - -if len(sys.argv) != 2: - print("ERROR: read_ascii_point.py -> Must specify exactly one input file.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -try: - print("Input File:\t" + repr(input_file)) - - # Read and format the input 11-column observations: - # (1) string: Message_Type - # (2) string: Station_ID - # (3) string: Valid_Time(YYYYMMDD_HHMMSS) - # (4) numeric: Lat(Deg North) - # (5) numeric: Lon(Deg East) - # (6) numeric: Elevation(msl) - # (7) string: Var_Name(or GRIB_Code) - # (8) numeric: Level - # (9) numeric: Height(msl or agl) - # (10) string: QC_String - # (11) numeric: Observation_Value - - point_data = pd.read_csv(input_file, header=None, delim_whitespace=True, keep_default_na=False, - names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'], - dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist() - print(" point_data: Data Length:\t" + repr(len(point_data))) - print(" point_data: Data Type:\t" + repr(type(point_data))) - met_point_data = convert_point_data(point_data) - print(" met_point_data: Data Type:\t" + repr(type(met_point_data))) -except NameError: - print("Can't find the input file") - sys.exit(1) - -######################################################################## diff --git a/scripts/python/read_ascii_xarray.py b/scripts/python/read_ascii_xarray.py deleted file mode 100755 index 6e906863a7..0000000000 --- a/scripts/python/read_ascii_xarray.py +++ /dev/null @@ -1,93 +0,0 @@ -import numpy as np -import os -import sys -import xarray as xr - -########################################### - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 3: - print("ERROR: read_ascii_xarray.py -> Must specify exactly one input file and a name for the data.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -data_name = sys.argv[2] -try: - # Print some output to verify that this script ran - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) -except NameError: - print("Can't find the input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - - 'name': 'FooGrid', - - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - - 'lat_pin': 12.19, - 'lon_pin': -135.459, - - 'x_pin': 0.0, - 'y_pin': 0.0, - - 'lon_orient': -95.0, - - 'd_km': 40.635, - 'r_km': 6371.2, - - 'nx': 185, - 'ny': 129, 
- } - -} - -print("Attributes:\t" + repr(attrs)) - -# Create an xarray DataArray object -da = xr.DataArray(met_data) -ds = xr.Dataset({"fcst":da}) - -# Add the attributes to the dataarray object -ds.attrs = attrs - -# Delete the local variable attrs to mimic the real world, -# where a user will rely on da.attrs rather than construct it themselves -del attrs - -# Delete the met_data variable, and reset it to be the Xarray object -del met_data - -# Create met_data and specify attrs because XR doesn't persist them. -met_data = xr.DataArray(ds.fcst, attrs=ds.attrs) diff --git a/scripts/python/read_met_point_obs.py b/scripts/python/read_met_point_obs.py deleted file mode 100755 index 57ccd22e7a..0000000000 --- a/scripts/python/read_met_point_obs.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python3 -''' -Created on Nov 10, 2021 - -@author: hsoh - -This script reads the MET point observation NetCDF file like MET tools do. - -Usage: - - python3 read_met_point_obs.py - python3 read_met_point_obs.py - : 11 columns - 'typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs' - string columns: 'typ', 'sid', 'vld', 'var', , 'qc' - numeric columns 'lat', 'lon', 'elv', 'lvl', 'hgt', 'qc', 'obs' - python3 read_met_point_obs.py - -''' - -import os -import sys -from datetime import datetime - -met_base_dir = os.getenv('MET_BASE',None) -if met_base_dir is not None: - sys.path.append(os.path.join(met_base_dir, 'python')) - -from met_point_obs import met_point_obs, sample_met_point_obs -from met_point_obs_nc import nc_point_obs - -DO_PRINT_DATA = False -ARG_PRINT_DATA = 'show_data' - -start_time = datetime.now() - -prompt = met_point_obs.get_prompt() -point_obs_data = None -if len(sys.argv) == 1 or ARG_PRINT_DATA == sys.argv[1]: - point_obs_data = sample_met_point_obs() - point_obs_data.read_data([]) -elif met_point_obs.is_python_prefix(sys.argv[1]): - import importlib.util - - print("{p} Python Script:\t".format(p=prompt) + repr(sys.argv[0])) - print("{p} User Command:\t".format(p=prompt) + repr(' '.join(sys.argv[2:]))) - - pyembed_module_name = sys.argv[2] - sys.argv = sys.argv[1:] - - # append user script dir to system path - pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) - if pyembed_dir: - sys.path.insert(0, pyembed_dir) - - if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - os.environ[met_point_obs.MET_ENV_RUN] = "TRUE" - - user_base = os.path.basename(pyembed_module_name).replace('.py','') - - spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) - met_in = importlib.util.module_from_spec(spec) - spec.loader.exec_module(met_in) - - met_point_obs = met_in.met_point_obs - print("met_point_obs: ", met_point_obs) - met_point_data = met_in.met_point_data - print("met_point_data: ", met_point_data) - #print(hasattr("met_in: ", dir(met_in))) - #met_point_data = met_point_obs.get_point_data() - #met_point_data = None if met_in.get('met_point_data', None) else met_in.met_point_data - #met_data = None if met_in.get('met_data', None) else met_in.met_data - print(met_point_data) -else: - netcdf_filename = sys.argv[1] - args = [ netcdf_filename ] - #args = { 'nc_name': netcdf_filename } - point_obs_data = nc_point_obs() - point_obs_data.read_data(point_obs_data.get_nc_filename(args)) - -if point_obs_data is not None: - met_point_data = point_obs_data.get_point_data() - met_point_data['met_point_data'] = point_obs_data - - if DO_PRINT_DATA or ARG_PRINT_DATA == sys.argv[-1]: - point_obs_data.dump() - -run_time = datetime.now() - 
start_time - -print('{p} Done python script {s} took {t}'.format(p=prompt, s=sys.argv[0], t=run_time)) diff --git a/scripts/utility/Makefile.am b/scripts/python/utility/Makefile.am similarity index 96% rename from scripts/utility/Makefile.am rename to scripts/python/utility/Makefile.am index d807a69977..5efd02b01e 100644 --- a/scripts/utility/Makefile.am +++ b/scripts/python/utility/Makefile.am @@ -23,7 +23,7 @@ ## scriptsrootdir = $(prefix)/share/scripts ## pythonutilitydir = ${scriptsrootdir}/utility -pythonutilitydir = $(pkgdatadir)/utility +pythonutilitydir = $(pkgdatadir)/python/utility pythonutility_DATA = \ print_pointnc2ascii.py \ diff --git a/scripts/utility/Makefile.in b/scripts/python/utility/Makefile.in similarity index 98% rename from scripts/utility/Makefile.in rename to scripts/python/utility/Makefile.in index bdaec7b3f9..7a994964d1 100644 --- a/scripts/utility/Makefile.in +++ b/scripts/python/utility/Makefile.in @@ -90,7 +90,7 @@ PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ -subdir = scripts/utility +subdir = scripts/python/utility ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ @@ -296,7 +296,7 @@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ -pythonutilitydir = $(pkgdatadir)/utility +pythonutilitydir = $(pkgdatadir)/python/utility pythonutility_DATA = \ print_pointnc2ascii.py \ build_ndbc_stations_from_web.py @@ -315,9 +315,9 @@ $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/utility/Makefile'; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/utility/Makefile'; \ $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign scripts/utility/Makefile + $(AUTOMAKE) --foreign scripts/python/utility/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ diff --git a/scripts/utility/build_ndbc_stations_from_web.py b/scripts/python/utility/build_ndbc_stations_from_web.py similarity index 100% rename from scripts/utility/build_ndbc_stations_from_web.py rename to scripts/python/utility/build_ndbc_stations_from_web.py diff --git a/scripts/utility/print_pointnc2ascii.py b/scripts/python/utility/print_pointnc2ascii.py similarity index 100% rename from scripts/utility/print_pointnc2ascii.py rename to scripts/python/utility/print_pointnc2ascii.py diff --git a/src/basic/vx_util/python_line.cc b/src/basic/vx_util/python_line.cc index 33b08806c7..555e6151ee 100644 --- a/src/basic/vx_util/python_line.cc +++ b/src/basic/vx_util/python_line.cc @@ -32,9 +32,13 @@ using namespace std; //////////////////////////////////////////////////////////////////////// -static const char set_python_env_wrapper [] = "set_python_env"; +static const char env_PYTHONPATH [] = "PYTHONPATH"; -static const char write_tmp_mpr_wrapper [] = "MET_BASE/wrappers/write_tmp_mpr.py"; +static const char met_python_path [] = "MET_BASE/python"; + +static const char set_python_env_wrapper [] = "pyembed.set_python_env"; + +static const char write_tmp_mpr_wrapper [] = "MET_BASE/python/pyembed/write_tmp_mpr.py"; static const char list_name [] = "mpr_data"; @@ -301,9 +305,7 @@ void PyLineDataFile::do_straight() { -ConcatString command, path, user_base; - -path = set_python_env_wrapper; +ConcatString command, user_base; mlog << Debug(3) << "PyLineDataFile::do_straight() -> " @@ -318,7 +320,7 @@ user_base.chomp(".py"); // start up the python interpreter // -script = new Python3_Script (path.text()); +script = get_python3_script(); // // set up a "new" sys.argv list // @@ -432,11 +434,7 @@ if ( status ) { } -ConcatString wrapper; - -wrapper = set_python_env_wrapper; - -script = new Python3_Script (wrapper.text()); +script = get_python3_script(); mlog << Debug(4) << "Reading temporary Python line data file: " << tmp_ascii_path << "\n"; @@ -572,6 +570,36 @@ return; } +//////////////////////////////////////////////////////////////////////// + +Python3_Script *get_python3_script() + +{ + +const char *method_name = "get_python3_script()"; +ConcatString path = set_python_env_wrapper; +ConcatString python_path; + +const char *env_pythonpath = getenv(env_PYTHONPATH); + +if (env_pythonpath) { + python_path = env_pythonpath; + python_path.add(':'); +} +python_path.add(replace_path(met_python_path)); +mlog << Debug(0) << method_name << " -> added python path (" + << replace_path(met_python_path) << ") to " << env_PYTHONPATH << "\n"; + +setenv(env_PYTHONPATH, python_path.c_str(),1); + // + // start up the python interpreter + // + +return new Python3_Script (path.text()); + +} + //////////////////////////////////////////////////////////////////////// diff --git a/src/basic/vx_util/python_line.h b/src/basic/vx_util/python_line.h index 604066e126..9b39e074da 100644 --- a/src/basic/vx_util/python_line.h +++ b/src/basic/vx_util/python_line.h @@ -91,6 +91,9 @@ class PyLineDataFile : public LineDataFile { //////////////////////////////////////////////////////////////////////// +extern Python3_Script *get_python3_script(); + +//////////////////////////////////////////////////////////////////////// #endif /* __PYTHON_LINE_H__ */ diff --git a/src/libcode/vx_data2d_python/python_dataplane.cc b/src/libcode/vx_data2d_python/python_dataplane.cc index fd8524b27f..00217bb7b2 100644 --- a/src/libcode/vx_data2d_python/python_dataplane.cc +++ 
b/src/libcode/vx_data2d_python/python_dataplane.cc @@ -31,9 +31,9 @@ extern GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_dataplane.py"; +static const char write_tmp_nc [] = "MET_BASE/python/pyembed/write_tmp_dataplane.py"; -static const char read_tmp_nc [] = "read_tmp_dataplane"; // NO ".py" suffix +static const char read_tmp_nc [] = "pyembed.read_tmp_dataplane"; // NO ".py" suffix static const char tmp_nc_var_name [] = "met_info"; diff --git a/src/libcode/vx_pointdata_python/Makefile.am b/src/libcode/vx_pointdata_python/Makefile.am index 5579afc6fe..4045badcde 100644 --- a/src/libcode/vx_pointdata_python/Makefile.am +++ b/src/libcode/vx_pointdata_python/Makefile.am @@ -12,6 +12,7 @@ include ${top_srcdir}/Make-include noinst_LIBRARIES = libvx_pointdata_python.a libvx_pointdata_python_a_SOURCES = \ + mask_filters.h mask_filters.cc \ pointdata_python.h pointdata_python.cc \ pointdata_from_array.h pointdata_from_array.cc pointdata_from_array.hpp \ python_pointdata.h python_pointdata.cc python_pointdata.hpp diff --git a/src/libcode/vx_pointdata_python/Makefile.in b/src/libcode/vx_pointdata_python/Makefile.in index a68210285d..8d36c6b212 100644 --- a/src/libcode/vx_pointdata_python/Makefile.in +++ b/src/libcode/vx_pointdata_python/Makefile.in @@ -108,6 +108,7 @@ am__v_AR_1 = libvx_pointdata_python_a_AR = $(AR) $(ARFLAGS) libvx_pointdata_python_a_LIBADD = am_libvx_pointdata_python_a_OBJECTS = \ + libvx_pointdata_python_a-mask_filters.$(OBJEXT) \ libvx_pointdata_python_a-pointdata_python.$(OBJEXT) \ libvx_pointdata_python_a-pointdata_from_array.$(OBJEXT) \ libvx_pointdata_python_a-python_pointdata.$(OBJEXT) @@ -129,6 +130,7 @@ DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__maybe_remake_depfiles = depfiles am__depfiles_remade = \ + ./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po \ ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po \ ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po \ ./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po @@ -340,6 +342,7 @@ MAINTAINERCLEANFILES = Makefile.in # The library noinst_LIBRARIES = libvx_pointdata_python.a libvx_pointdata_python_a_SOURCES = \ + mask_filters.h mask_filters.cc \ pointdata_python.h pointdata_python.cc \ pointdata_from_array.h pointdata_from_array.cc pointdata_from_array.hpp \ python_pointdata.h python_pointdata.cc python_pointdata.hpp @@ -393,6 +396,7 @@ mostlyclean-compile: distclean-compile: -rm -f *.tab.c +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po@am__quote@ # am--include-marker @@ -417,6 +421,20 @@ am--depfiles: $(am__depfiles_remade) @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` +libvx_pointdata_python_a-mask_filters.o: mask_filters.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT 
libvx_pointdata_python_a-mask_filters.o -MD -MP -MF $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo -c -o libvx_pointdata_python_a-mask_filters.o `test -f 'mask_filters.cc' || echo '$(srcdir)/'`mask_filters.cc +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='mask_filters.cc' object='libvx_pointdata_python_a-mask_filters.o' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o libvx_pointdata_python_a-mask_filters.o `test -f 'mask_filters.cc' || echo '$(srcdir)/'`mask_filters.cc + +libvx_pointdata_python_a-mask_filters.obj: mask_filters.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT libvx_pointdata_python_a-mask_filters.obj -MD -MP -MF $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo -c -o libvx_pointdata_python_a-mask_filters.obj `if test -f 'mask_filters.cc'; then $(CYGPATH_W) 'mask_filters.cc'; else $(CYGPATH_W) '$(srcdir)/mask_filters.cc'; fi` +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='mask_filters.cc' object='libvx_pointdata_python_a-mask_filters.obj' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o libvx_pointdata_python_a-mask_filters.obj `if test -f 'mask_filters.cc'; then $(CYGPATH_W) 'mask_filters.cc'; else $(CYGPATH_W) '$(srcdir)/mask_filters.cc'; fi` + libvx_pointdata_python_a-pointdata_python.o: pointdata_python.cc @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT libvx_pointdata_python_a-pointdata_python.o -MD -MP -MF $(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Tpo -c -o libvx_pointdata_python_a-pointdata_python.o `test -f 'pointdata_python.cc' || echo '$(srcdir)/'`pointdata_python.cc @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Tpo $(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po @@ -584,7 +602,8 @@ clean: clean-am clean-am: clean-generic clean-noinstLIBRARIES mostlyclean-am distclean: distclean-am - -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po -rm -f Makefile @@ -632,7 +651,8 @@ install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am - -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po -rm -f 
./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po -rm -f Makefile diff --git a/src/libcode/vx_pointdata_python/mask_filters.cc b/src/libcode/vx_pointdata_python/mask_filters.cc new file mode 100644 index 0000000000..bc8aa17d13 --- /dev/null +++ b/src/libcode/vx_pointdata_python/mask_filters.cc @@ -0,0 +1,156 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// + + +using namespace std; + +#include "mask_filters.h" + + +//////////////////////////////////////////////////////////////////////// + + + // + // Code for class MaskFilters + // + + +//////////////////////////////////////////////////////////////////////// + + +MaskFilters::MaskFilters(): + grid_mask(0), + area_mask(0), + poly_mask(0), + sid_mask(0), + typ_mask(0) +{ + clear(); +} + +//////////////////////////////////////////////////////////////////////// + +MaskFilters::MaskFilters(Grid *_grid_mask, MaskPlane *_area_mask, MaskPoly *_poly_mask, + StringArray *_sid_mask, StringArray *_typ_mask) { + clear(); + grid_mask = _grid_mask; + area_mask = _area_mask; + poly_mask = _poly_mask; + sid_mask = _sid_mask; + typ_mask = _typ_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::clear() { + grid_mask_cnt = 0; + area_mask_cnt = 0; + poly_mask_cnt = 0; + sid_mask_cnt = 0; + typ_mask_cnt = 0; +} + +//////////////////////////////////////////////////////////////////////// + +bool MaskFilters::is_filtered(double lat, double lon) { + bool masked = false; + // Apply the grid mask + if(grid_mask) { + double grid_x, grid_y; + grid_mask->latlon_to_xy(lat, -1.0*lon, grid_x, grid_y); + + if(grid_x < 0 || grid_x >= grid_mask->nx() || + grid_y < 0 || grid_y >= grid_mask->ny()) { + grid_mask_cnt++; + masked = true; + } + + // Apply the area mask + if(area_mask && !masked) { + if(!area_mask->s_is_on(nint(grid_x), nint(grid_y))) { + area_mask_cnt++; + masked = true; + } + } + } + + // Apply the polyline mask + if(poly_mask && !masked) { + if(!poly_mask->latlon_is_inside_dege(lat, lon)) { + poly_mask_cnt++; + masked = true; + } + } + + return masked; +} + +//////////////////////////////////////////////////////////////////////// + +bool MaskFilters::is_filtered_sid(const char *sid) { + bool masked = false; + + // Apply the station ID mask + if(sid_mask) { + if(!sid_mask->has(sid)) { + sid_mask_cnt++; + masked = true; + } + } + + return masked; +} + +//////////////////////////////////////////////////////////////////////// + +bool MaskFilters::is_filtered_typ(const char *msg_typ) { + bool masked = false; + + // Apply the message type mask + if(typ_mask) { + if(!typ_mask->has(msg_typ)) { + typ_mask_cnt++; + masked = true; + } + } + return masked; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_area_mask(MaskPlane *_area_mask) { + area_mask = _area_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_grid_mask(Grid *_grid_mask) { + grid_mask = _grid_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_poly_mask(MaskPoly *_poly_mask) { + 
poly_mask = _poly_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_sid_mask(StringArray *_sid_mask) { + sid_mask = _sid_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_typ_mask(StringArray *_typ_mask) { + typ_mask = _typ_mask; +} + +//////////////////////////////////////////////////////////////////////// diff --git a/src/libcode/vx_pointdata_python/mask_filters.h b/src/libcode/vx_pointdata_python/mask_filters.h new file mode 100644 index 0000000000..39d7264138 --- /dev/null +++ b/src/libcode/vx_pointdata_python/mask_filters.h @@ -0,0 +1,82 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + + +//////////////////////////////////////////////////////////////////////// + + +#ifndef __MASK_FILTERS_H__ +#define __MASK_FILTERS_H__ + + +//////////////////////////////////////////////////////////////////////// + + +#include "grid_base.h" + + +//////////////////////////////////////////////////////////////////////// + +class MaskFilters { + +protected: + + int grid_mask_cnt; + int area_mask_cnt; + int poly_mask_cnt; + int typ_mask_cnt; + int sid_mask_cnt; + + Grid *grid_mask; + MaskPlane *area_mask; + MaskPoly *poly_mask; + StringArray *sid_mask; // station IDs to be excluded + StringArray *typ_mask; // message types to be excluded + +public: + + MaskFilters(); + MaskFilters(Grid *grid_mask, MaskPlane *area_mask, + MaskPoly *poly_mask, StringArray *sid_mask, StringArray *typ_mask); + + void clear(); + + int get_area_mask_cnt(); + int get_grid_mask_cnt(); + int get_poly_mask_cnt(); + int get_sid_mask_cnt(); + int get_typ_mask_cnt(); + + bool is_filtered(double lat, double lon); + bool is_filtered_sid(const char *sid); + bool is_filtered_typ(const char *msg_typ); + + void set_area_mask(MaskPlane *_area_mask); + void set_grid_mask(Grid *_grid_mask); + void set_poly_mask(MaskPoly *_poly_mask); + void set_sid_mask(StringArray *_sid_mask); + void set_typ_mask(StringArray *_typ_mask); + +}; + +//////////////////////////////////////////////////////////////////////// + +inline int MaskFilters::get_area_mask_cnt() { return area_mask_cnt; }; +inline int MaskFilters::get_grid_mask_cnt() { return grid_mask_cnt; }; +inline int MaskFilters::get_poly_mask_cnt() { return poly_mask_cnt; }; +inline int MaskFilters::get_sid_mask_cnt() { return sid_mask_cnt; }; +inline int MaskFilters::get_typ_mask_cnt() { return typ_mask_cnt; }; + + +//////////////////////////////////////////////////////////////////////// + + +#endif /* __MASK_FILTERS_H__ */ + + +//////////////////////////////////////////////////////////////////////// diff --git a/src/libcode/vx_pointdata_python/pointdata_python.cc b/src/libcode/vx_pointdata_python/pointdata_python.cc index d349ec9bb8..fc5cfcc858 100644 --- a/src/libcode/vx_pointdata_python/pointdata_python.cc +++ b/src/libcode/vx_pointdata_python/pointdata_python.cc @@ -172,7 +172,9 @@ file_name = full_path; file_name.chomp(".py"); // remove possible ".py" suffix from script filename -bool status = python_point_data(file_name.c_str(), file_argc, file_argv, use_xarray, met_data); +MaskFilters *filters = 0; +bool status = python_point_data(file_name.c_str(), 
file_argc, file_argv, + met_data, filters); met_data.get_hdr_cnt(); met_data.get_obs_cnt(); diff --git a/src/libcode/vx_pointdata_python/python_pointdata.cc b/src/libcode/vx_pointdata_python/python_pointdata.cc index d389596fea..4b3834a1d2 100644 --- a/src/libcode/vx_pointdata_python/python_pointdata.cc +++ b/src/libcode/vx_pointdata_python/python_pointdata.cc @@ -8,7 +8,10 @@ //////////////////////////////////////////////////////////////////////// +#include +#include +#include "observation.h" #include "vx_python3_utils.h" #include "python_pointdata.h" #include "pointdata_from_array.h" @@ -27,22 +30,120 @@ extern GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_point_nc.py"; +static const char write_tmp_nc [] = "MET_BASE/python/pyembed/write_tmp_point_nc.py"; -static const char read_tmp_nc [] = "read_tmp_point_nc"; // NO ".py" suffix +static const char read_tmp_nc [] = "pyembed.read_tmp_point_nc"; // NO ".py" suffix //////////////////////////////////////////////////////////////////////// static bool tmp_nc_point_obs(const char * script_name, int user_script_argc, - char ** user_script_argv, MetPointDataPython &met_pd_out); + char ** user_script_argv, MetPointDataPython &met_pd_out, + MaskFilters *filters); static bool straight_python_point_data(const char * script_name, int script_argc, char ** script_argv, - const bool use_xarray, MetPointDataPython &met_pd_out); + MetPointDataPython &met_pd_out, + MaskFilters *filters); + +bool process_point_data(PyObject *module_obj, MetPointDataPython &met_pd_out); +bool process_point_data_list(PyObject *python_obj, MetPointDataPython &met_pd_out, + MaskFilters *filters); + +//////////////////////////////////////////////////////////////////////// + +void check_header_data(MetPointHeader *header_data, const char *caller) { + + if (header_data->typ_idx_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_typ is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->sid_idx_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_sid is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->vld_idx_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_vld is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->lat_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_lat is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->lon_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_lon is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->elv_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_elv is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + + if (header_data->typ_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_typ_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->sid_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_sid_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->vld_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_vld_table is empty. 
Please check if python input is processed properly\n\n"; + exit (1); + } +} //////////////////////////////////////////////////////////////////////// +void check_obs_data(MetPointObsData *obs_data, bool use_var_id, const char *caller) { + + if (obs_data->qty_names.n() == 0) { + mlog << Error << "\n" << caller + << "The obs_qty_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (use_var_id && obs_data->var_names.n() == 0) { + mlog << Error << "\n" << caller + << "The obs_var_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + +} + +//////////////////////////////////////////////////////////////////////// + + +PyObject *get_python_object(PyObject *module_obj, const char *python_var_name) +{ + + // + // get the namespace for the module (as a dictionary) + // + + PyObject *module_dict_obj = PyModule_GetDict (module_obj); + + // + // get handles to the objects of interest from the module_dict + // + + PyObject *python_met_point_data = PyDict_GetItemString (module_dict_obj, python_var_name); + + return python_met_point_data; +} + + +//////////////////////////////////////////////////////////////////////// + + static void set_str_array_from_python(PyObject *python_data, const char *python_key, StringArray *out) { const char *method_name = "set_met_array_from_python(StringArray *) -> "; PyObject *str_array_obj = PyDict_GetItemString (python_data, python_key); @@ -63,7 +164,7 @@ static void set_str_array_from_python(PyObject *python_data, const char *python_ bool python_point_data(const char * script_name, int script_argc, char ** script_argv, - const bool use_xarray, MetPointDataPython &met_pd_out) + MetPointDataPython &met_pd_out, MaskFilters *filters) { @@ -71,16 +172,15 @@ bool status = false; if ( user_ppath == 0 ) user_ppath = getenv(user_python_path_env); -if ( user_ppath != 0 ) { - // do_tmp_nc = true; +if ( user_ppath != 0 ) { // do_tmp_nc = true; status = tmp_nc_point_obs(script_name, script_argc, script_argv, - met_pd_out); + met_pd_out, filters); } else { status = straight_python_point_data(script_name, script_argc, script_argv, - use_xarray, met_pd_out); + met_pd_out, filters); } return ( status ); @@ -89,29 +189,21 @@ return ( status ); //////////////////////////////////////////////////////////////////////// -bool process_python_point_data(PyObject *module_obj, MetPointDataPython &met_pd_out) +bool process_point_data(PyObject *python_met_point_data, + MetPointDataPython &met_pd_out) + { int int_value; -PyObject *module_dict_obj = 0; PyObject *python_value = 0; -PyObject *python_met_point_data = 0; ConcatString cs, user_dir, user_base; -const char *method_name = "process_python_point_data -> "; -const char *method_name_s = "process_python_point_data()"; - - // - // get the namespace for the module (as a dictionary) - // - -module_dict_obj = PyModule_GetDict (module_obj); +const char *method_name = "process_point_data -> "; +const char *method_name_s = "process_point_data()"; // // get handles to the objects of interest from the module_dict // -python_met_point_data = PyDict_GetItemString (module_dict_obj, python_key_point_data); - python_value = PyDict_GetItemString (python_met_point_data, python_use_var_id); bool use_var_id = pyobject_as_bool(python_value); @@ -142,8 +234,7 @@ met_pd_out.allocate(int_value); MetPointObsData *obs_data = met_pd_out.get_point_obs_data(); MetPointHeader *header_data = met_pd_out.get_header_data(); - - // look up the data array variable name from the dictionary + // look up the 
data array variable name from the dictionary set_array_from_python(python_met_point_data, numpy_array_hdr_typ, &header_data->typ_idx_array); set_array_from_python(python_met_point_data, numpy_array_hdr_sid, &header_data->sid_idx_array); @@ -151,59 +242,17 @@ MetPointHeader *header_data = met_pd_out.get_header_data(); set_array_from_python(python_met_point_data, numpy_array_hdr_lat, &header_data->lat_array); set_array_from_python(python_met_point_data, numpy_array_hdr_lon, &header_data->lon_array); set_array_from_python(python_met_point_data, numpy_array_hdr_elv, &header_data->elv_array); - if (header_data->typ_idx_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_typ is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->sid_idx_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_sid is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->vld_idx_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_vld is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->lat_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_lat is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->lon_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_lon is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->elv_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_elv is empty. Please check if python input is processed properly\n\n"; - exit (1); - } set_str_array_from_python(python_met_point_data, numpy_array_hdr_typ_table, &header_data->typ_array); set_str_array_from_python(python_met_point_data, numpy_array_hdr_sid_table, &header_data->sid_array); set_str_array_from_python(python_met_point_data, numpy_array_hdr_vld_table, &header_data->vld_array); - if (header_data->typ_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_typ_table is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->sid_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_sid_table is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->vld_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_vld_table is empty. 
Please check if python input is processed properly\n\n"; - exit (1); - } + set_array_from_python(python_met_point_data, numpy_array_prpt_typ_table, &header_data->prpt_typ_array, false); set_array_from_python(python_met_point_data, numpy_array_irpt_typ_table, &header_data->irpt_typ_array, false); set_array_from_python(python_met_point_data, numpy_array_inst_typ_table, &header_data->inst_typ_array, false); + check_header_data(header_data, method_name); + set_array_from_python(python_met_point_data, numpy_array_obs_qty, obs_data->obs_qids); set_array_from_python(python_met_point_data, numpy_array_obs_hid, obs_data->obs_hids); set_array_from_python(python_met_point_data, numpy_array_obs_vid, obs_data->obs_ids); @@ -213,24 +262,173 @@ MetPointHeader *header_data = met_pd_out.get_header_data(); set_str_array_from_python(python_met_point_data, numpy_array_obs_qty_table, &obs_data->qty_names); set_str_array_from_python(python_met_point_data, numpy_array_obs_var_table, &obs_data->var_names); - if (obs_data->qty_names.n() == 0) { + + check_obs_data(obs_data, use_var_id, method_name); + + if(mlog.verbosity_level()>=point_data_debug_level) { + print_met_data(met_pd_out.get_point_obs_data(), + met_pd_out.get_header_data(), method_name_s); + } + + // + // done + // + +return ( true ); + +} + + +//////////////////////////////////////////////////////////////////////// + +bool process_point_data_list(PyObject *python_point_data, MetPointDataPython &met_pd_out, + MaskFilters *filters) +{ + + bool use_var_id; + Observation obs; + time_t vld_time; + int hid, vid, qid, sid, typ_idx, vld_idx; + double lat, lon, elv, hgt, level, obs_value; + double prev_lat, prev_lon, prev_elv, prev_vld, prev_typ, prev_sid; + Python3_List list(python_point_data); + const char *method_name = "process_point_data_list -> "; + const char *method_name_s = "process_point_data_list()"; + + int obs_cnt = list.size(); + if (obs_cnt == 0) { mlog << Error << "\n" << method_name - << "The obs_qty_table is empty. Please check if python input is processed properly\n\n"; + << "The point observation data is empty. 
Please check if python input is processed properly\n\n"; exit (1); } - if (use_var_id && obs_data->var_names.n() == 0) { + + // + // initialize use_var_id to false + // + + use_var_id = false; + hid = -1; // starts from -1 to be 0 for the first header + prev_lat = prev_lon = prev_elv = bad_data_double; + prev_vld = prev_typ = prev_sid = bad_data_double; + + met_pd_out.allocate(obs_cnt); + MetPointHeader *header_data = met_pd_out.get_header_data(); + MetPointObsData *obs_data = met_pd_out.get_point_obs_data(); + + for (int j=0; jis_filtered(lat, lon)) continue; + if (filters->is_filtered_sid(obs.getStationId().c_str())) continue; + if (filters->is_filtered_typ(obs.getHeaderType().c_str())) continue; + } + + // get message type index + str_data = obs.getHeaderType(); + if ( !header_data->typ_array.has(str_data, typ_idx) ) { + header_data->typ_array.add(str_data); + header_data->typ_array.has(str_data, typ_idx); + } + + // get station ID index + str_data = obs.getStationId(); + if ( !header_data->sid_array.has(str_data, sid) ) { + header_data->sid_array.add(str_data); + header_data->sid_array.has(str_data, sid); + } + + // get valid time index + vld_time = obs.getValidTime(); + if ( !header_data->vld_num_array.has(vld_time, vld_idx) ) { + header_data->vld_num_array.add(vld_time); + header_data->vld_num_array.has(vld_time, vld_idx); + } + + if (!is_eq(prev_lat, lat) || !is_eq(prev_lon, lon) || !is_eq(prev_elv, elv) + || !is_eq(prev_sid, sid) || !is_eq(prev_typ, typ_idx) + || !is_eq(prev_vld, vld_idx)) { + header_data->lat_array.add(lat); + header_data->lon_array.add(lon); + header_data->elv_array.add(elv); + header_data->sid_idx_array.add(sid); + header_data->typ_idx_array.add(typ_idx); + header_data->vld_idx_array.add(vld_idx); + header_data->vld_array.add(obs.getValidTimeString()); + + prev_lat = lat; + prev_lon = lon; + prev_elv = elv; + prev_sid = sid; + prev_typ = typ_idx; + prev_vld = vld_idx; + hid++; + } + obs_data->obs_hids[j] = hid; + + // get the observation variable code + str_data = obs.getVarName(); + if ( use_var_id || !is_number(str_data.c_str()) ) { + use_var_id = true; + // update the list of variable names + if ( !obs_data->var_names.has(str_data, vid) ) { + obs_data->var_names.add(str_data); + obs_data->var_names.has(str_data, vid); + } + } + else { + vid = atoi(obs.getVarName().c_str()); + } + obs_data->obs_ids[j] = vid; + obs.setVarCode(vid); + + // get the quality flag index + str_data = obs.getQualityFlag(); + if ( !obs_data->qty_names.has(str_data, qid) ) { + obs_data->qty_names.add(str_data); + obs_data->qty_names.has(str_data, qid); + } + obs_data->obs_qids[j] = qid; + obs_data->obs_lvls[j] = obs.getPressureLevel(); + obs_data->obs_hgts[j] = obs.getHeight(); + obs_data->obs_vals[j] = obs.getValue(); + + } // for j + + met_pd_out.set_use_var_id(use_var_id); + mlog << Debug(9) << method_name << "use_var_id: \"" << use_var_id + << "\" from python. is_using_var_id(): " << met_pd_out.is_using_var_id() << "\n"; + + if (hid <= 0) { mlog << Error << "\n" << method_name - << "The obs_var_table is empty. Please check if python input is processed properly\n\n"; + << "The header is empty. 
       exit (1);
    }
+   met_pd_out.set_hdr_cnt(hid);
 
-   if(mlog.verbosity_level()>=point_data_debug_level) print_met_data(obs_data, header_data, method_name_s);
+   check_obs_data(obs_data, use_var_id, method_name);
+   check_header_data(header_data, method_name);
+
+   if(mlog.verbosity_level()>=point_data_debug_level) {
+      print_met_data(met_pd_out.get_point_obs_data(),
+                     met_pd_out.get_header_data(), method_name_s);
+   }
 
    //
    //  done
    //
 
-return ( true );
+   return ( true );
 
 }
 
@@ -239,17 +437,14 @@ return ( true );
 
 bool straight_python_point_data(const char * script_name, int script_argc, char ** script_argv,
-                                const bool use_xarray, MetPointDataPython &met_pd_out)
+                                MetPointDataPython &met_pd_out, MaskFilters *filters)
 {
 
 int int_value;
 PyObject *module_obj = 0;
-PyObject *module_dict_obj = 0;
 PyObject *python_value = 0;
-PyObject *python_met_point_data = 0;
 ConcatString cs, user_dir, user_base;
 const char *method_name = "straight_python_point_data -> ";
-const char *method_name_s = "straight_python_point_data()";
 
 cs = script_name;
@@ -349,9 +544,24 @@ if ( ! module_obj ) {
 
 }
 
+bool result = false;
+PyObject *met_point_data = get_python_object(module_obj, python_key_point_data);
+if ( met_point_data && met_point_data != &_Py_NoneStruct) {
+   result = process_point_data(met_point_data, met_pd_out);
+}
+else {
+   PyObject *point_data = get_python_object(module_obj, python_key_point_data_list);
+   if ( point_data && point_data != &_Py_NoneStruct)
+      result = process_point_data_list(point_data, met_pd_out, filters);
+   else {
+      mlog << Warning << "\n" << method_name
+           << "neither \"" << python_key_point_data << "\" nor \""
+           << python_key_point_data_list << "\" found in \""
+           << script_name << "\"\n\n";
+   }
+}
 
-return process_python_point_data(module_obj, met_pd_out);
-
+return result;
 
 }
 
@@ -359,7 +569,8 @@ return process_python_point_data(module_obj, met_pd_out);
 
 bool tmp_nc_point_obs(const char * user_script_name, int user_script_argc,
-                      char ** user_script_argv, MetPointDataPython &met_pd_out)
+                      char ** user_script_argv, MetPointDataPython &met_pd_out,
+                      MaskFilters *filters)
 
 {
 
@@ -402,6 +613,10 @@ command << cs_erase
         << replace_path(python_dir)
         << "\")";
 
 run_python_string(command.text());
+mlog << Debug(0) << method_name << "added python path ("
+     << python_dir << ") to python interpreter\n";
+
+//setenv(env_PYTHONPATH, python_dir.c_str(),1);
 
 mlog << Debug(3) << "Running user-specified python instance (MET_PYTHON_EXE="
      << user_ppath << ") to run user's python script ("
     << user_script_name << ").\n";
@@ -509,8 +724,14 @@ if ( ! module_obj ) {
 
 //
 
-process_python_point_data(module_obj, met_pd_out);
-
+PyObject *met_point_data = get_python_object(module_obj, python_key_point_data);
+if ( met_point_data ) {
+   process_point_data(met_point_data, met_pd_out);
+}
+else {
+   PyObject *point_data = get_python_object(module_obj, python_key_point_data_list);
+   process_point_data_list(point_data, met_pd_out, filters);
+}
 
 //
 //  cleanup
 
diff --git a/src/libcode/vx_pointdata_python/python_pointdata.h b/src/libcode/vx_pointdata_python/python_pointdata.h
index 5bfb87ca2e..284421b17c 100644
--- a/src/libcode/vx_pointdata_python/python_pointdata.h
+++ b/src/libcode/vx_pointdata_python/python_pointdata.h
@@ -16,6 +16,7 @@
 
 ////////////////////////////////////////////////////////////////////////
 
+#include "mask_filters.h"
 #include "met_point_data.h"
 
 
@@ -29,6 +30,7 @@ extern "C" {
 
 ////////////////////////////////////////////////////////////////////////
 
 static const char python_key_point_data     [] = "met_point_data";
+static const char python_key_point_data_list[] = "point_data";
 
 static const char python_key_nhdr    [] = "nhdr";
 //static const char python_key_npbhdr [] = "npbhdr";
 
@@ -62,8 +64,10 @@ static const int point_data_debug_level = 10;
 
 ////////////////////////////////////////////////////////////////////////
 
-extern bool python_point_data(const char * script_name, int script_argc, char ** script_argv,
-                              const bool use_xarray, MetPointDataPython &met_pd_out);
+extern bool python_point_data(const char * script_name, int script_argc,
+                              char ** script_argv, MetPointDataPython &met_pd_out,
+                              MaskFilters *filters);
+
 //extern bool python_point_data(const char *python_command, const bool use_xarray,
 //                              MetPointData & po_out);
 
 extern void print_met_data(MetPointObsData *obs_data, MetPointHeader *header_data,
 
diff --git a/src/libcode/vx_python3_utils/python3_script.cc b/src/libcode/vx_python3_utils/python3_script.cc
index e34fb3ed7a..34fc038d71 100644
--- a/src/libcode/vx_python3_utils/python3_script.cc
+++ b/src/libcode/vx_python3_utils/python3_script.cc
@@ -26,7 +26,7 @@ using namespace std;
 
 static const char sq = '\'';   // single quote
 
-static const char read_tmp_ascii_py [] = "MET_BASE/wrappers/read_tmp_ascii.py";
+static const char read_tmp_ascii_py [] = "MET_BASE/python/pyembed/read_tmp_ascii.py";
 
 ////////////////////////////////////////////////////////////////////////
 
diff --git a/src/tools/other/ascii2nc/file_handler.cc b/src/tools/other/ascii2nc/file_handler.cc
index f2a062aa7f..74020804f0 100644
--- a/src/tools/other/ascii2nc/file_handler.cc
+++ b/src/tools/other/ascii2nc/file_handler.cc
@@ -53,14 +53,6 @@ FileHandler::FileHandler(const string &program_name) :
   _nhdr(0),
   _hdrNum(0),
   _obsNum(0),
-  _gridMaskNum(0),
-  _areaMaskNum(0),
-  _polyMaskNum(0),
-  _sidMaskNum(0),
-  _gridMask(0),
-  _areaMask(0),
-  _polyMask(0),
-  _sidMask(0),
   use_var_id(false),
   do_monitor(false),
   deflate_level(DEF_DEFLATE_LEVEL),
@@ -121,11 +113,11 @@ bool FileHandler::writeNetcdfFile(const string &nc_filename)
 
   // List the number of rejected observations.
   mlog << Debug(2)
-       << "Rejected " << _gridMaskNum
+       << "Rejected " << filters.get_grid_mask_cnt()
        << " observations off the masking grid.\n"
-       << "Rejected " << _areaMaskNum + _polyMaskNum
+       << "Rejected " << filters.get_area_mask_cnt() + filters.get_poly_mask_cnt()
        << " observations outside the masking polyline.\n"
-       << "Rejected " << _sidMaskNum
+       << "Rejected " << filters.get_sid_mask_cnt()
        << " observations not matched with station ID's.\n";
 
   // Loop through the observations, counting the number of headers needed in
 
@@ -274,52 +266,14 @@ bool FileHandler::_addObservations(const Observation &obs)
   double grid_x, grid_y;
 
   //
-  // Apply the grid mask
+  // Apply the grid mask, the area mask, and the polyline mask
   //
-  if(_gridMask) {
-    _gridMask->latlon_to_xy(obs.getLatitude(), -1.0*obs.getLongitude(),
-                            grid_x, grid_y);
-
-    if(grid_x < 0 || grid_x >= _gridMask->nx() ||
-       grid_y < 0 || grid_y >= _gridMask->ny()) {
-      _gridMaskNum++;
-      return false;
-    }
-
-    //
-    // Apply the area mask
-    //
-    if(_areaMask) {
-      if(!_areaMask->s_is_on(nint(grid_x), nint(grid_y))) {
-        _areaMaskNum++;
-        return false;
-      }
-    }
-  }
-
-  //
-  // Apply the polyline mask
-  //
-  if(_polyMask)
-  {
-    if(!_polyMask->latlon_is_inside_dege(obs.getLatitude(), obs.getLongitude()))
-    {
-      _polyMaskNum++;
-      return false;
-    }
-  }
+  if(filters.is_filtered(obs.getLatitude(), obs.getLongitude())) return false;
 
   //
   // Apply the station ID mask
   //
-  if(_sidMask)
-  {
-    if(!_sidMask->has(obs.getStationId().c_str()))
-    {
-      _sidMaskNum++;
-      return false;
-    }
-  }
+  if(filters.is_filtered_sid(obs.getStationId().c_str())) return false;
 
   // Save obs because the obs vector is sorted after time summary
   _observations.push_back(obs);
 
diff --git a/src/tools/other/ascii2nc/file_handler.h b/src/tools/other/ascii2nc/file_handler.h
index 006f965d3e..ece575672a 100644
--- a/src/tools/other/ascii2nc/file_handler.h
+++ b/src/tools/other/ascii2nc/file_handler.h
@@ -25,6 +25,7 @@
 #include <netcdf>
 
 #include "mask_poly.h"
+#include "mask_filters.h"
 #include "vx_grid.h"
 #include "vx_config.h"
 #include "vx_util.h"
 
@@ -92,15 +93,7 @@ class FileHandler
   int _hdrNum;
   int _obsNum;
 
-  int _gridMaskNum;
-  int _areaMaskNum;
-  int _polyMaskNum;
-  int _sidMaskNum;
-
-  Grid        *_gridMask;
-  MaskPlane   *_areaMask;
-  MaskPoly    *_polyMask;
-  StringArray *_sidMask;
+  MaskFilters filters;
 
   map<ConcatString, ConcatString> _messageTypeMap;
 
@@ -149,20 +142,14 @@ class FileHandler
   void _closeNetcdf();
   bool _openNetcdf(const string &nc_filename);
 
-//  bool _writeHdrInfo(const ConcatString &hdr_typ,
-//                     const ConcatString &hdr_sid,
-//                     const time_t hdr_vld,
-//                     double lat, double lon, double elv);
-//  bool _writeObsInfo(int gc, float prs, float hgt, float obs,
-//                     const ConcatString &qty);
 
   void debug_print_observations(vector< Observation >, string);
 };
 
 inline void FileHandler::setCompressionLevel(int compression_level) { deflate_level = compression_level; }
-inline void FileHandler::setGridMask(Grid &g) { _gridMask = &g; }
-inline void FileHandler::setAreaMask(MaskPlane &a) { _areaMask = &a; }
-inline void FileHandler::setPolyMask(MaskPoly &p) { _polyMask = &p; }
-inline void FileHandler::setSIDMask (StringArray &s) { _sidMask = &s; }
+inline void FileHandler::setGridMask(Grid &g) { filters.set_grid_mask(&g); }
+inline void FileHandler::setAreaMask(MaskPlane &a) { filters.set_area_mask(&a); }
+inline void FileHandler::setPolyMask(MaskPoly &p) { filters.set_poly_mask(&p); }
+inline void FileHandler::setSIDMask (StringArray &s) { filters.set_sid_mask(&s); }
 inline void FileHandler::setMessageTypeMap(map<ConcatString, ConcatString> m) { _messageTypeMap = m; }
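
The mask_filters.h header that file_handler.h and python_pointdata.h now include is not
part of this patch. The sketch below is a plausible reconstruction of the interface its
call sites assume: the method names come directly from the calls above, while the member
names, the inline bodies, and the set_typ_mask() setter (no caller for it appears in this
patch) are assumptions, so the real class may well differ.

    // mask_filters.h -- hypothetical sketch, reconstructed from the call sites in this patch
    #include "vx_grid.h"      // Grid
    #include "mask_poly.h"    // MaskPoly
    #include "vx_util.h"      // MaskPlane, StringArray
    #include "vx_math.h"      // nint()

    class MaskFilters {

       public:

          MaskFilters() : _grid_mask(0), _area_mask(0), _poly_mask(0),
                          _sid_mask(0), _typ_mask(0),
                          _grid_mask_cnt(0), _area_mask_cnt(0),
                          _poly_mask_cnt(0), _sid_mask_cnt(0), _typ_mask_cnt(0) {}

          // setters, as called from the FileHandler inline functions above
          void set_grid_mask(Grid *g)        { _grid_mask = g; }
          void set_area_mask(MaskPlane *a)   { _area_mask = a; }
          void set_poly_mask(MaskPoly *p)    { _poly_mask = p; }
          void set_sid_mask (StringArray *s) { _sid_mask  = s; }
          void set_typ_mask (StringArray *t) { _typ_mask  = t; }

          // true if (lat, lon) is rejected by the grid, area, or polyline mask;
          // mirrors the logic removed from FileHandler::_addObservations()
          bool is_filtered(double lat, double lon) {
             double grid_x, grid_y;
             if(_grid_mask) {
                _grid_mask->latlon_to_xy(lat, -1.0*lon, grid_x, grid_y);
                if(grid_x < 0 || grid_x >= _grid_mask->nx() ||
                   grid_y < 0 || grid_y >= _grid_mask->ny()) {
                   _grid_mask_cnt++;
                   return true;
                }
                if(_area_mask && !_area_mask->s_is_on(nint(grid_x), nint(grid_y))) {
                   _area_mask_cnt++;
                   return true;
                }
             }
             if(_poly_mask && !_poly_mask->latlon_is_inside_dege(lat, lon)) {
                _poly_mask_cnt++;
                return true;
             }
             return false;
          }

          // true if the station ID or message type is not in the mask list
          bool is_filtered_sid(const char *sid) {
             if(_sid_mask && !_sid_mask->has(sid)) { _sid_mask_cnt++; return true; }
             return false;
          }
          bool is_filtered_typ(const char *typ) {
             if(_typ_mask && !_typ_mask->has(typ)) { _typ_mask_cnt++; return true; }
             return false;
          }

          // rejection counters reported by FileHandler::writeNetcdfFile()
          int get_grid_mask_cnt() const { return _grid_mask_cnt; }
          int get_area_mask_cnt() const { return _area_mask_cnt; }
          int get_poly_mask_cnt() const { return _poly_mask_cnt; }
          int get_sid_mask_cnt () const { return _sid_mask_cnt;  }

       private:

          Grid        *_grid_mask;
          MaskPlane   *_area_mask;
          MaskPoly    *_poly_mask;
          StringArray *_sid_mask;
          StringArray *_typ_mask;

          int _grid_mask_cnt, _area_mask_cnt, _poly_mask_cnt,
              _sid_mask_cnt, _typ_mask_cnt;
    };

Whatever its exact shape, the design point stands: folding the four masks and their
rejection counters into one object lets ascii2nc and the new point_data python-embedding
path share a single filtering implementation, in place of the duplicated mask logic
deleted from FileHandler::_addObservations().
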
diff --git a/src/tools/other/ascii2nc/python_handler.cc b/src/tools/other/ascii2nc/python_handler.cc
index 3a68e11da0..a5a55e0f94 100644
--- a/src/tools/other/ascii2nc/python_handler.cc
+++ b/src/tools/other/ascii2nc/python_handler.cc
@@ -18,6 +18,7 @@ using namespace std;
 
 #include "vx_log.h"
 #include "vx_math.h"
+#include "python_line.h"
 #include "vx_python3_utils.h"
 #include "python_handler.h"
 
@@ -25,10 +26,7 @@ using namespace std;
 
 ////////////////////////////////////////////////////////////////////////
 
-
-static const char set_python_env_wrapper [] = "set_python_env";
-
-static const char write_tmp_ascii_wrapper[] = "MET_BASE/wrappers/write_tmp_point.py";
+static const char write_tmp_ascii_wrapper[] = "MET_BASE/python/pyembed/write_tmp_point.py";
 
 static const char list_name [] = "point_data";
 
@@ -244,9 +242,7 @@ bool PythonHandler::do_straight()
 
 {
 
-ConcatString command, path, user_base;
-
-path = set_python_env_wrapper;
+ConcatString command, user_base;
 
 mlog << Debug(3)
      << "Running user's python script ("
@@ -260,7 +256,7 @@ user_base.chomp(".py");
 //
 //  start up the python interpreter
 //
 
-Python3_Script script(path.text());
+Python3_Script *script = get_python3_script();
 
 //
 //  set up a "new" sys.argv list
@@ -268,7 +264,7 @@ Python3_Script script(path.text());
 //    the user's script
 //
 
-script.reset_argv(user_script_filename.text(), user_script_args);
+script->reset_argv(user_script_filename.text(), user_script_args);
 
 //
 //  import the user's script as a module
 
@@ -377,20 +373,16 @@ if ( status ) {
 
 }
 
-ConcatString wrapper;
-
-wrapper = set_python_env_wrapper;
-
-Python3_Script script(wrapper.text());
+Python3_Script *script = get_python3_script();
 
 mlog << Debug(4) << "Reading temporary Python ascii observation file: "
      << tmp_ascii_path << "\n";
 
-script.import_read_tmp_ascii_py();
+script->import_read_tmp_ascii_py();
 
-PyObject * dobj = script.read_tmp_ascii(tmp_ascii_path.text());
+PyObject * dobj = script->read_tmp_ascii(tmp_ascii_path.text());
 
-PyObject * obj = script.lookup_ascii(tmp_list_name);
+PyObject * obj = script->lookup_ascii(tmp_list_name);
 
 if ( ! PyList_Check(obj) ) {