From ff9415498120edde5bc358ccf43e6704794c79bc Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Thu, 9 Mar 2023 16:46:19 -0700
Subject: [PATCH 01/81] #2285 Read temporary file as CSV if reading as NetCDF
 fails, to support the point_data variable

---
 data/wrappers/read_tmp_point_nc.py | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/data/wrappers/read_tmp_point_nc.py b/data/wrappers/read_tmp_point_nc.py
index 0ef8eefc3a..774738a7c7 100644
--- a/data/wrappers/read_tmp_point_nc.py
+++ b/data/wrappers/read_tmp_point_nc.py
@@ -18,9 +18,14 @@
 netcdf_filename = sys.argv[1]
 
 # read NetCDF file
-print('{p} reading{f}'.format(p=met_point_obs.get_prompt(), f=netcdf_filename))
-point_obs_data = nc_point_obs()
-point_obs_data.read_data(netcdf_filename)
+print('{p} reading {f}'.format(p=met_point_obs.get_prompt(), f=netcdf_filename))
+try:
+    point_obs_data = nc_point_obs()
+    point_obs_data.read_data(netcdf_filename)
 
-met_point_data = point_obs_data.get_point_data()
-met_point_data['met_point_data'] = point_obs_data
+    met_point_data = point_obs_data.get_point_data()
+    met_point_data['met_point_data'] = point_obs_data
+except:
+    from read_tmp_ascii import read_tmp_ascii
+
+    point_data = read_tmp_ascii(netcdf_filename)

From 1622220b741b48bb8f64eb685c0c7dd44255c325 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Thu, 9 Mar 2023 16:47:27 -0700
Subject: [PATCH 02/81] #2285 Write a temporary file as CSV if point_data
 variable exists

---
 data/wrappers/write_tmp_point_nc.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/data/wrappers/write_tmp_point_nc.py b/data/wrappers/write_tmp_point_nc.py
index 063a2e98cc..35ca014fd7 100644
--- a/data/wrappers/write_tmp_point_nc.py
+++ b/data/wrappers/write_tmp_point_nc.py
@@ -42,7 +42,11 @@
 met_in = importlib.util.module_from_spec(spec)
 spec.loader.exec_module(met_in)
 
-if hasattr(met_in, 'point_obs_data'):
+if hasattr(met_in, 'point_data'):
+    from write_tmp_point import write_tmp_ascii
+
+    write_tmp_ascii(tmp_filename, met_in.point_data)
+elif hasattr(met_in, 'point_obs_data'):
     met_in.point_obs_data.save_ncfile(tmp_filename)
 else:
     if hasattr(met_in.met_point_data, 'point_obs_data'):

From 1cfd0cea6c07fc1a1a2654fbc1b8d5bb58119d84 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Thu, 9 Mar 2023 16:49:10 -0600
Subject: [PATCH 03/81] #2285 Separated write_tmp_ascii and prevented the main
 code from running on import

---
 data/wrappers/write_tmp_point.py | 47 ++++++++++++++++++--------------
 1 file changed, 26 insertions(+), 21 deletions(-)

diff --git a/data/wrappers/write_tmp_point.py b/data/wrappers/write_tmp_point.py
index 916fca5549..a289a4c331 100644
--- a/data/wrappers/write_tmp_point.py
+++ b/data/wrappers/write_tmp_point.py
@@ -12,32 +12,37 @@
 import sys
 import importlib.util
 
-print("Python Script:\t" + repr(sys.argv[0]))
-print("User Command:\t" + repr(' '.join(sys.argv[2:])))
-print("Temporary File:\t" + repr(sys.argv[1]))
 
-tmp_filename = sys.argv[1]
-pyembed_module_name = sys.argv[2]
-sys.argv = sys.argv[2:]
+def write_tmp_ascii(filename, point_data):
+    with open(filename, 'w') as f:
+        for line in point_data:
+            f.write(str(line) + '\n')
 
+if __name__ == '__main__':
+    print("Python Script:\t" + repr(sys.argv[0]))
+    print("User Command:\t" + repr(' '.join(sys.argv[2:])))
+    print("Temporary File:\t" + repr(sys.argv[1]))
+
+    tmp_filename = sys.argv[1]
+    pyembed_module_name = sys.argv[2]
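+    # argv[1] is the temporary output file and argv[2] is the user script;
+    # trimming sys.argv below lets the user script parse its own arguments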
+    sys.argv = sys.argv[2:]
+
+    # add share/met/python directory to system path to find met_point_obs
+    sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
 os.pardir, 'python')))
 
-# append user script dir to system path
-pyembed_dir, pyembed_file = os.path.split(pyembed_module_name)
-if pyembed_dir:
-    sys.path.insert(0, pyembed_dir)
+    # append user script dir to system path
+    pyembed_dir, pyembed_file = os.path.split(pyembed_module_name)
+    if pyembed_dir:
+        sys.path.insert(0, pyembed_dir)
 
-if not pyembed_module_name.endswith('.py'):
-    pyembed_module_name += '.py'
+    if not pyembed_module_name.endswith('.py'):
+        pyembed_module_name += '.py'
 
-user_base = os.path.basename(pyembed_module_name).replace('.py','')
+    user_base = os.path.basename(pyembed_module_name).replace('.py','')
 
-spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name)
-met_in = importlib.util.module_from_spec(spec)
-spec.loader.exec_module(met_in)
+    spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name)
+    met_in = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(met_in)
 
-f = open(tmp_filename, 'w')
-for line in met_in.point_data:
-    f.write(str(line) + '\n')
+    write_tmp_ascii(tmp_filename, met_in.point_data)

From e4dc0681219483b605b324837d0da7dc4a06ef64 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Tue, 14 Mar 2023 10:16:01 -0600
Subject: [PATCH 04/81] #2285 Added mask_filters.h and mask_filters.cc

---
 src/libcode/vx_pointdata_python/Makefile.am |  1 +
 src/libcode/vx_pointdata_python/Makefile.in | 24 +++++++++++++++++++--
 2 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/src/libcode/vx_pointdata_python/Makefile.am b/src/libcode/vx_pointdata_python/Makefile.am
index 5579afc6fe..4045badcde 100644
--- a/src/libcode/vx_pointdata_python/Makefile.am
+++ b/src/libcode/vx_pointdata_python/Makefile.am
@@ -12,6 +12,7 @@ include ${top_srcdir}/Make-include
 noinst_LIBRARIES = libvx_pointdata_python.a
 libvx_pointdata_python_a_SOURCES = \
+	mask_filters.h mask_filters.cc \
 	pointdata_python.h pointdata_python.cc \
 	pointdata_from_array.h pointdata_from_array.cc pointdata_from_array.hpp \
 	python_pointdata.h python_pointdata.cc python_pointdata.hpp
diff --git a/src/libcode/vx_pointdata_python/Makefile.in b/src/libcode/vx_pointdata_python/Makefile.in
index a68210285d..8d36c6b212 100644
--- a/src/libcode/vx_pointdata_python/Makefile.in
+++ b/src/libcode/vx_pointdata_python/Makefile.in
@@ -108,6 +108,7 @@ am__v_AR_1 =
 libvx_pointdata_python_a_AR = $(AR) $(ARFLAGS)
 libvx_pointdata_python_a_LIBADD =
 am_libvx_pointdata_python_a_OBJECTS = \
+	libvx_pointdata_python_a-mask_filters.$(OBJEXT) \
 	libvx_pointdata_python_a-pointdata_python.$(OBJEXT) \
 	libvx_pointdata_python_a-pointdata_from_array.$(OBJEXT) \
 	libvx_pointdata_python_a-python_pointdata.$(OBJEXT)
@@ -129,6 +130,7 @@ DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
 depcomp = $(SHELL) $(top_srcdir)/depcomp
 am__maybe_remake_depfiles = depfiles
 am__depfiles_remade = \
+	./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po \
 	./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po \
 	./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po \
 	./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po
@@ -340,6 +342,7 @@ MAINTAINERCLEANFILES = Makefile.in
 # The library
 noinst_LIBRARIES = libvx_pointdata_python.a
 libvx_pointdata_python_a_SOURCES = \
+	mask_filters.h mask_filters.cc \
 	pointdata_python.h pointdata_python.cc \
 	pointdata_from_array.h pointdata_from_array.cc pointdata_from_array.hpp \
python_pointdata.h python_pointdata.cc python_pointdata.hpp @@ -393,6 +396,7 @@ mostlyclean-compile: distclean-compile: -rm -f *.tab.c +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po@am__quote@ # am--include-marker @@ -417,6 +421,20 @@ am--depfiles: $(am__depfiles_remade) @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` +libvx_pointdata_python_a-mask_filters.o: mask_filters.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT libvx_pointdata_python_a-mask_filters.o -MD -MP -MF $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo -c -o libvx_pointdata_python_a-mask_filters.o `test -f 'mask_filters.cc' || echo '$(srcdir)/'`mask_filters.cc +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='mask_filters.cc' object='libvx_pointdata_python_a-mask_filters.o' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o libvx_pointdata_python_a-mask_filters.o `test -f 'mask_filters.cc' || echo '$(srcdir)/'`mask_filters.cc + +libvx_pointdata_python_a-mask_filters.obj: mask_filters.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT libvx_pointdata_python_a-mask_filters.obj -MD -MP -MF $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo -c -o libvx_pointdata_python_a-mask_filters.obj `if test -f 'mask_filters.cc'; then $(CYGPATH_W) 'mask_filters.cc'; else $(CYGPATH_W) '$(srcdir)/mask_filters.cc'; fi` +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Tpo $(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='mask_filters.cc' object='libvx_pointdata_python_a-mask_filters.obj' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o libvx_pointdata_python_a-mask_filters.obj `if test -f 'mask_filters.cc'; then $(CYGPATH_W) 'mask_filters.cc'; else $(CYGPATH_W) '$(srcdir)/mask_filters.cc'; fi` + libvx_pointdata_python_a-pointdata_python.o: pointdata_python.cc @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libvx_pointdata_python_a_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT libvx_pointdata_python_a-pointdata_python.o -MD -MP -MF $(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Tpo -c -o 
libvx_pointdata_python_a-pointdata_python.o `test -f 'pointdata_python.cc' || echo '$(srcdir)/'`pointdata_python.cc @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Tpo $(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po @@ -584,7 +602,8 @@ clean: clean-am clean-am: clean-generic clean-noinstLIBRARIES mostlyclean-am distclean: distclean-am - -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po -rm -f Makefile @@ -632,7 +651,8 @@ install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am - -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-mask_filters.Po + -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_from_array.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-pointdata_python.Po -rm -f ./$(DEPDIR)/libvx_pointdata_python_a-python_pointdata.Po -rm -f Makefile From 9e0fba7292a1c0c58dc9f0be44cde0424e02f9c4 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 23 Mar 2023 15:28:37 -0600 Subject: [PATCH 05/81] #2285 Moved files at scripts/utility to scripts/python/utility --- scripts/{ => python}/utility/build_ndbc_stations_from_web.py | 0 scripts/{ => python}/utility/print_pointnc2ascii.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename scripts/{ => python}/utility/build_ndbc_stations_from_web.py (100%) rename scripts/{ => python}/utility/print_pointnc2ascii.py (100%) diff --git a/scripts/utility/build_ndbc_stations_from_web.py b/scripts/python/utility/build_ndbc_stations_from_web.py similarity index 100% rename from scripts/utility/build_ndbc_stations_from_web.py rename to scripts/python/utility/build_ndbc_stations_from_web.py diff --git a/scripts/utility/print_pointnc2ascii.py b/scripts/python/utility/print_pointnc2ascii.py similarity index 100% rename from scripts/utility/print_pointnc2ascii.py rename to scripts/python/utility/print_pointnc2ascii.py From b252b8bbff605b4a6df7f750796c1a1a6726ffe3 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 23 Mar 2023 15:34:06 -0600 Subject: [PATCH 06/81] #2285 Moved files at scripts/utility to scripts/python/utility --- scripts/{ => python}/utility/Makefile.am | 0 scripts/{ => python}/utility/Makefile.in | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename scripts/{ => python}/utility/Makefile.am (100%) rename scripts/{ => python}/utility/Makefile.in (100%) diff --git a/scripts/utility/Makefile.am b/scripts/python/utility/Makefile.am similarity index 100% rename from scripts/utility/Makefile.am rename to scripts/python/utility/Makefile.am diff --git a/scripts/utility/Makefile.in b/scripts/python/utility/Makefile.in similarity index 100% rename from scripts/utility/Makefile.in rename to scripts/python/utility/Makefile.in From 47ad2009035fc1d2159c35764a501200fad4c764 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 23 Mar 2023 15:51:23 -0600 Subject: [PATCH 07/81] #2285 Moved files at data/wrappers to scripts/python/pyembed --- scripts/python/{met_point_obs.py => met/point.py} | 0 {data/wrappers => scripts/python/pyembed}/Makefile.am | 0 {data/wrappers => scripts/python/pyembed}/Makefile.in | 0 {data/wrappers => scripts/python/pyembed}/read_tmp_ascii.py | 0 {data/wrappers => scripts/python/pyembed}/read_tmp_dataplane.py | 0 
{data/wrappers => scripts/python/pyembed}/read_tmp_point_nc.py | 0 {data/wrappers => scripts/python/pyembed}/set_python_env.py | 0 {data/wrappers => scripts/python/pyembed}/write_tmp_dataplane.py | 0 {data/wrappers => scripts/python/pyembed}/write_tmp_mpr.py | 0 {data/wrappers => scripts/python/pyembed}/write_tmp_point.py | 0 {data/wrappers => scripts/python/pyembed}/write_tmp_point_nc.py | 0 11 files changed, 0 insertions(+), 0 deletions(-) rename scripts/python/{met_point_obs.py => met/point.py} (100%) rename {data/wrappers => scripts/python/pyembed}/Makefile.am (100%) rename {data/wrappers => scripts/python/pyembed}/Makefile.in (100%) rename {data/wrappers => scripts/python/pyembed}/read_tmp_ascii.py (100%) rename {data/wrappers => scripts/python/pyembed}/read_tmp_dataplane.py (100%) rename {data/wrappers => scripts/python/pyembed}/read_tmp_point_nc.py (100%) rename {data/wrappers => scripts/python/pyembed}/set_python_env.py (100%) rename {data/wrappers => scripts/python/pyembed}/write_tmp_dataplane.py (100%) rename {data/wrappers => scripts/python/pyembed}/write_tmp_mpr.py (100%) rename {data/wrappers => scripts/python/pyembed}/write_tmp_point.py (100%) rename {data/wrappers => scripts/python/pyembed}/write_tmp_point_nc.py (100%) diff --git a/scripts/python/met_point_obs.py b/scripts/python/met/point.py similarity index 100% rename from scripts/python/met_point_obs.py rename to scripts/python/met/point.py diff --git a/data/wrappers/Makefile.am b/scripts/python/pyembed/Makefile.am similarity index 100% rename from data/wrappers/Makefile.am rename to scripts/python/pyembed/Makefile.am diff --git a/data/wrappers/Makefile.in b/scripts/python/pyembed/Makefile.in similarity index 100% rename from data/wrappers/Makefile.in rename to scripts/python/pyembed/Makefile.in diff --git a/data/wrappers/read_tmp_ascii.py b/scripts/python/pyembed/read_tmp_ascii.py similarity index 100% rename from data/wrappers/read_tmp_ascii.py rename to scripts/python/pyembed/read_tmp_ascii.py diff --git a/data/wrappers/read_tmp_dataplane.py b/scripts/python/pyembed/read_tmp_dataplane.py similarity index 100% rename from data/wrappers/read_tmp_dataplane.py rename to scripts/python/pyembed/read_tmp_dataplane.py diff --git a/data/wrappers/read_tmp_point_nc.py b/scripts/python/pyembed/read_tmp_point_nc.py similarity index 100% rename from data/wrappers/read_tmp_point_nc.py rename to scripts/python/pyembed/read_tmp_point_nc.py diff --git a/data/wrappers/set_python_env.py b/scripts/python/pyembed/set_python_env.py similarity index 100% rename from data/wrappers/set_python_env.py rename to scripts/python/pyembed/set_python_env.py diff --git a/data/wrappers/write_tmp_dataplane.py b/scripts/python/pyembed/write_tmp_dataplane.py similarity index 100% rename from data/wrappers/write_tmp_dataplane.py rename to scripts/python/pyembed/write_tmp_dataplane.py diff --git a/data/wrappers/write_tmp_mpr.py b/scripts/python/pyembed/write_tmp_mpr.py similarity index 100% rename from data/wrappers/write_tmp_mpr.py rename to scripts/python/pyembed/write_tmp_mpr.py diff --git a/data/wrappers/write_tmp_point.py b/scripts/python/pyembed/write_tmp_point.py similarity index 100% rename from data/wrappers/write_tmp_point.py rename to scripts/python/pyembed/write_tmp_point.py diff --git a/data/wrappers/write_tmp_point_nc.py b/scripts/python/pyembed/write_tmp_point_nc.py similarity index 100% rename from data/wrappers/write_tmp_point_nc.py rename to scripts/python/pyembed/write_tmp_point_nc.py From fd184acf5b025140182fb3c3172fb603a42cbc14 Mon Sep 
17 00:00:00 2001 From: Howard Soh Date: Mon, 3 Apr 2023 16:16:23 -0600 Subject: [PATCH 08/81] #2285 Moved python script from python to python/examples --- scripts/python/derive_WRF_semilatlon.py | 101 ------------------------ scripts/python/read_ascii_mpr.py | 33 -------- scripts/python/read_ascii_numpy.py | 75 ------------------ scripts/python/read_ascii_numpy_grid.py | 51 ------------ scripts/python/read_ascii_point.py | 48 ----------- scripts/python/read_ascii_xarray.py | 93 ---------------------- 6 files changed, 401 deletions(-) delete mode 100644 scripts/python/derive_WRF_semilatlon.py delete mode 100755 scripts/python/read_ascii_mpr.py delete mode 100755 scripts/python/read_ascii_numpy.py delete mode 100755 scripts/python/read_ascii_numpy_grid.py delete mode 100755 scripts/python/read_ascii_point.py delete mode 100755 scripts/python/read_ascii_xarray.py diff --git a/scripts/python/derive_WRF_semilatlon.py b/scripts/python/derive_WRF_semilatlon.py deleted file mode 100644 index d42cfc66c3..0000000000 --- a/scripts/python/derive_WRF_semilatlon.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import sys -import numpy as np -import datetime as dt -from netCDF4 import Dataset,chartostring - -########################################### - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 4: - print("Must specify exactly one input file, variable name, and summary axis (lat, lon, latlon).") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -var_name = sys.argv[2] -axis = sys.argv[3] - -try: - # Print some output to verify that this script ran - print("Input File: " + repr(input_file)) - print("Variable: " + repr(var_name)) - print("Axis: " + repr(axis)) - - # Read input file - f = Dataset(input_file, 'r') - - data = np.float64(f.variables[var_name][0,:,:,:]) - data[data > 1.0e30] = np.nan - - pvals = list(np.float64(f.variables["pressure"][:])) - - if axis == "lon": - met_data = np.nanmean(data[::-1], axis=1).copy() - elif axis == "lat": - met_data = np.nanmean(data[::-1], axis=2).transpose().copy() - elif axis == "latlon": - met_data = np.nanmean(data[::-1], axis=1).copy() - else: - print("ERROR: Unsupported axis type: " + axis) - sys.exit(1) - - print("Data Shape: " + repr(met_data.shape)) - print("Data Type: " + repr(met_data.dtype)) -except NameError: - print("Trouble reading data from input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -init = dt.datetime.strptime(getattr(f, "START_DATE"), "%Y-%m-%d_%H:%M:%S") -valid_ref = dt.datetime.strptime(getattr(f.variables["Time"], "units"), "hours since %Y-%m-%d %H:%M:%S") -add_hours = float(f.variables["Time"][:]) -valid = valid_ref + dt.timedelta(hours=add_hours) -lead, rem = divmod((valid-init).total_seconds(), 3600) -accum = "00" - - # Use the first column of lats - -if axis == "lon": - lats = list() - lons = list(np.float64(f.variables["XLONG"][0,0,:])) -elif axis == "lat": - lats = list(np.float64(f.variables["XLAT"][0,:,0])) - lons = list() -elif axis == "latlon": - lats = list(np.float64(f.variables["XLONG"][0,0,:])) - lons = list(np.float64(f.variables["XLAT"][0,0,:])) - -levels = list(pvals) -times = list() - -attrs = { - 'valid': valid.strftime("%Y%m%d_%H%M%S"), - 'init': init.strftime("%Y%m%d_%H%M%S"), - 'lead': str(int(lead)), - 'accum': accum, - - 'name': var_name, - 'long_name': str(getattr(f.variables[var_name], "description")), - 'level': axis + 
"_mean", - 'units': str(getattr(f.variables[var_name], "units")), - - 'grid': { - 'type' : "SemiLatLon", - 'name' : axis + "_mean", - 'lats' : lats, - 'lons' : lons, - 'levels' : levels, - 'times' : times - } -} - -print("Attributes: " + repr(attrs)) diff --git a/scripts/python/read_ascii_mpr.py b/scripts/python/read_ascii_mpr.py deleted file mode 100755 index fa71b8e6d2..0000000000 --- a/scripts/python/read_ascii_mpr.py +++ /dev/null @@ -1,33 +0,0 @@ -import pandas as pd -import os -import sys - -######################################################################## - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 2: - print("ERROR: read_ascii_point.py -> Must specify exactly one input file.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -try: - print("Input File:\t" + repr(input_file)) - - # Read MPR lines, skipping the header row and first column. - mpr_data = pd.read_csv(input_file, header=None, - delim_whitespace=True, keep_default_na=False, - skiprows=1, usecols=range(1,37), - dtype=str).values.tolist() - print("Data Length:\t" + repr(len(mpr_data))) - print("Data Type:\t" + repr(type(mpr_data))) -except NameError: - print("Can't find the input file") - -######################################################################## diff --git a/scripts/python/read_ascii_numpy.py b/scripts/python/read_ascii_numpy.py deleted file mode 100755 index 6d129afc1c..0000000000 --- a/scripts/python/read_ascii_numpy.py +++ /dev/null @@ -1,75 +0,0 @@ -import numpy as np -import os -import sys - -########################################### - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 3: - print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -data_name = sys.argv[2] -try: - # Print some output to verify that this script ran - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) -except NameError: - print("Can't find the input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - - 'name': 'FooGrid', - - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - - 'lat_pin': 12.19, - 'lon_pin': -135.459, - - 'x_pin': 0.0, - 'y_pin': 0.0, - - 'lon_orient': -95.0, - - 'd_km': 40.635, - 'r_km': 6371.2, - - 'nx': 185, - 'ny': 129, - } - -} - -print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/read_ascii_numpy_grid.py b/scripts/python/read_ascii_numpy_grid.py deleted file mode 100755 index 3e4cc25f69..0000000000 --- a/scripts/python/read_ascii_numpy_grid.py +++ /dev/null @@ -1,51 +0,0 @@ -import numpy as np -import os -import sys - -########################################### - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the 
command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 3: - print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -data_name = sys.argv[2] -try: - # Print some output to verify that this script ran - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) -except NameError: - print("Can't find the input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - 'grid': os.path.expandvars(os.getenv('PYTHON_GRID')) -} - -print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/read_ascii_point.py b/scripts/python/read_ascii_point.py deleted file mode 100755 index 7fb8eb076a..0000000000 --- a/scripts/python/read_ascii_point.py +++ /dev/null @@ -1,48 +0,0 @@ -import pandas as pd -import os -import sys -from met_point_obs import convert_point_data - -######################################################################## - -print("Python Script:\t" + repr(sys.argv[0])) - -## -## input file specified on the command line -## load the data into the numpy array -## - -if len(sys.argv) != 2: - print("ERROR: read_ascii_point.py -> Must specify exactly one input file.") - sys.exit(1) - -# Read the input file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -try: - print("Input File:\t" + repr(input_file)) - - # Read and format the input 11-column observations: - # (1) string: Message_Type - # (2) string: Station_ID - # (3) string: Valid_Time(YYYYMMDD_HHMMSS) - # (4) numeric: Lat(Deg North) - # (5) numeric: Lon(Deg East) - # (6) numeric: Elevation(msl) - # (7) string: Var_Name(or GRIB_Code) - # (8) numeric: Level - # (9) numeric: Height(msl or agl) - # (10) string: QC_String - # (11) numeric: Observation_Value - - point_data = pd.read_csv(input_file, header=None, delim_whitespace=True, keep_default_na=False, - names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'], - dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist() - print(" point_data: Data Length:\t" + repr(len(point_data))) - print(" point_data: Data Type:\t" + repr(type(point_data))) - met_point_data = convert_point_data(point_data) - print(" met_point_data: Data Type:\t" + repr(type(met_point_data))) -except NameError: - print("Can't find the input file") - sys.exit(1) - -######################################################################## diff --git a/scripts/python/read_ascii_xarray.py b/scripts/python/read_ascii_xarray.py deleted file mode 100755 index 6e906863a7..0000000000 --- a/scripts/python/read_ascii_xarray.py +++ /dev/null @@ -1,93 +0,0 @@ -import numpy as np -import os -import sys -import xarray as xr - -########################################### - -print("Python Script:\t" + repr(sys.argv[0])) - - ## - ## input file specified on the command line - ## load the data into the numpy array - ## - -if len(sys.argv) != 3: - print("ERROR: read_ascii_xarray.py -> Must specify exactly one input file and a name for the data.") - sys.exit(1) - -# Read the input 
file as the first argument -input_file = os.path.expandvars(sys.argv[1]) -data_name = sys.argv[2] -try: - # Print some output to verify that this script ran - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) -except NameError: - print("Can't find the input file") - -########################################### - - ## - ## create the metadata dictionary - ## - -attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - - 'name': 'FooGrid', - - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - - 'lat_pin': 12.19, - 'lon_pin': -135.459, - - 'x_pin': 0.0, - 'y_pin': 0.0, - - 'lon_orient': -95.0, - - 'd_km': 40.635, - 'r_km': 6371.2, - - 'nx': 185, - 'ny': 129, - } - -} - -print("Attributes:\t" + repr(attrs)) - -# Create an xarray DataArray object -da = xr.DataArray(met_data) -ds = xr.Dataset({"fcst":da}) - -# Add the attributes to the dataarray object -ds.attrs = attrs - -# Delete the local variable attrs to mimic the real world, -# where a user will rely on da.attrs rather than construct it themselves -del attrs - -# Delete the met_data variable, and reset it to be the Xarray object -del met_data - -# Create met_data and specify attrs because XR doesn't persist them. -met_data = xr.DataArray(ds.fcst, attrs=ds.attrs) From e04ebc886a25edfcd86be9a6640501aaadedaa49 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 3 Apr 2023 16:17:19 -0600 Subject: [PATCH 09/81] #2285 Moved python script from python to python/examples --- .../python/examples/derive_WRF_semilatlon.py | 101 ++++++++++++++++++ scripts/python/examples/read_ascii_mpr.py | 33 ++++++ scripts/python/examples/read_ascii_numpy.py | 26 +++++ .../python/examples/read_ascii_numpy_grid.py | 29 +++++ scripts/python/examples/read_ascii_point.py | 65 +++++++++++ scripts/python/examples/read_ascii_xarray.py | 50 +++++++++ 6 files changed, 304 insertions(+) create mode 100644 scripts/python/examples/derive_WRF_semilatlon.py create mode 100644 scripts/python/examples/read_ascii_mpr.py create mode 100644 scripts/python/examples/read_ascii_numpy.py create mode 100644 scripts/python/examples/read_ascii_numpy_grid.py create mode 100644 scripts/python/examples/read_ascii_point.py create mode 100644 scripts/python/examples/read_ascii_xarray.py diff --git a/scripts/python/examples/derive_WRF_semilatlon.py b/scripts/python/examples/derive_WRF_semilatlon.py new file mode 100644 index 0000000000..d42cfc66c3 --- /dev/null +++ b/scripts/python/examples/derive_WRF_semilatlon.py @@ -0,0 +1,101 @@ +import os +import sys +import numpy as np +import datetime as dt +from netCDF4 import Dataset,chartostring + +########################################### + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) != 4: + print("Must specify exactly one input file, variable name, and summary axis (lat, lon, latlon).") + sys.exit(1) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1]) +var_name = sys.argv[2] +axis = sys.argv[3] + +try: + # Print some output to verify that this script ran + print("Input File: " + repr(input_file)) + print("Variable: " + repr(var_name)) + print("Axis: " + 
repr(axis)) + + # Read input file + f = Dataset(input_file, 'r') + + data = np.float64(f.variables[var_name][0,:,:,:]) + data[data > 1.0e30] = np.nan + + pvals = list(np.float64(f.variables["pressure"][:])) + + if axis == "lon": + met_data = np.nanmean(data[::-1], axis=1).copy() + elif axis == "lat": + met_data = np.nanmean(data[::-1], axis=2).transpose().copy() + elif axis == "latlon": + met_data = np.nanmean(data[::-1], axis=1).copy() + else: + print("ERROR: Unsupported axis type: " + axis) + sys.exit(1) + + print("Data Shape: " + repr(met_data.shape)) + print("Data Type: " + repr(met_data.dtype)) +except NameError: + print("Trouble reading data from input file") + +########################################### + + ## + ## create the metadata dictionary + ## + +init = dt.datetime.strptime(getattr(f, "START_DATE"), "%Y-%m-%d_%H:%M:%S") +valid_ref = dt.datetime.strptime(getattr(f.variables["Time"], "units"), "hours since %Y-%m-%d %H:%M:%S") +add_hours = float(f.variables["Time"][:]) +valid = valid_ref + dt.timedelta(hours=add_hours) +lead, rem = divmod((valid-init).total_seconds(), 3600) +accum = "00" + + # Use the first column of lats + +if axis == "lon": + lats = list() + lons = list(np.float64(f.variables["XLONG"][0,0,:])) +elif axis == "lat": + lats = list(np.float64(f.variables["XLAT"][0,:,0])) + lons = list() +elif axis == "latlon": + lats = list(np.float64(f.variables["XLONG"][0,0,:])) + lons = list(np.float64(f.variables["XLAT"][0,0,:])) + +levels = list(pvals) +times = list() + +attrs = { + 'valid': valid.strftime("%Y%m%d_%H%M%S"), + 'init': init.strftime("%Y%m%d_%H%M%S"), + 'lead': str(int(lead)), + 'accum': accum, + + 'name': var_name, + 'long_name': str(getattr(f.variables[var_name], "description")), + 'level': axis + "_mean", + 'units': str(getattr(f.variables[var_name], "units")), + + 'grid': { + 'type' : "SemiLatLon", + 'name' : axis + "_mean", + 'lats' : lats, + 'lons' : lons, + 'levels' : levels, + 'times' : times + } +} + +print("Attributes: " + repr(attrs)) diff --git a/scripts/python/examples/read_ascii_mpr.py b/scripts/python/examples/read_ascii_mpr.py new file mode 100644 index 0000000000..fa71b8e6d2 --- /dev/null +++ b/scripts/python/examples/read_ascii_mpr.py @@ -0,0 +1,33 @@ +import pandas as pd +import os +import sys + +######################################################################## + +print("Python Script:\t" + repr(sys.argv[0])) + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) != 2: + print("ERROR: read_ascii_point.py -> Must specify exactly one input file.") + sys.exit(1) + +# Read the input file as the first argument +input_file = os.path.expandvars(sys.argv[1]) +try: + print("Input File:\t" + repr(input_file)) + + # Read MPR lines, skipping the header row and first column. 
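+    # (usecols=range(1,37) keeps columns 1-36 as strings; dtype=str leaves
+    # numeric parsing to MET itself)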
+    mpr_data = pd.read_csv(input_file, header=None,
+                           delim_whitespace=True, keep_default_na=False,
+                           skiprows=1, usecols=range(1,37),
+                           dtype=str).values.tolist()
+    print("Data Length:\t" + repr(len(mpr_data)))
+    print("Data Type:\t" + repr(type(mpr_data)))
+except NameError:
+    print("Can't find the input file")
+
+########################################################################
diff --git a/scripts/python/examples/read_ascii_numpy.py b/scripts/python/examples/read_ascii_numpy.py
new file mode 100644
index 0000000000..eaccb64f2d
--- /dev/null
+++ b/scripts/python/examples/read_ascii_numpy.py
@@ -0,0 +1,26 @@
+import os
+import sys
+from met.dataplane import load_txt, get_grid_metadata
+
+###########################################
+
+print("Python Script:\t" + repr(sys.argv[0]))
+
+   ##
+   ##  input file specified on the command line
+   ##  load the data into the numpy array
+   ##
+
+if len(sys.argv) != 3:
+    print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.")
+    sys.exit(1)
+
+# Read the input file as the first argument
+input_file = os.path.expandvars(sys.argv[1])
+data_name = sys.argv[2]
+
+met_data = load_txt(input_file, data_name)
+
+attrs = get_grid_metadata(data_name)
+
+print("Attributes:\t" + repr(attrs))
diff --git a/scripts/python/examples/read_ascii_numpy_grid.py b/scripts/python/examples/read_ascii_numpy_grid.py
new file mode 100644
index 0000000000..c3bb071616
--- /dev/null
+++ b/scripts/python/examples/read_ascii_numpy_grid.py
@@ -0,0 +1,29 @@
+import os
+import sys
+from met.dataplane import load_txt, get_grid_metadata_from_env
+
+###########################################
+
+print("Python Script:\t" + repr(sys.argv[0]))
+
+   ##
+   ##  input file specified on the command line
+   ##  load the data into the numpy array
+   ##
+
+if len(sys.argv) != 3:
+    print("ERROR: read_ascii_numpy_grid.py -> Must specify exactly one input file and a name for the data.")
+    sys.exit(1)
+
+# Read the input file as the first argument
+input_file = os.path.expandvars(sys.argv[1])
+data_name = sys.argv[2]
+
+met_data = load_txt(input_file, data_name)
+
+## create the metadata dictionary from the environment variable,
+## Default env_name = 'PYTHON_GRID'
+
+attrs = get_grid_metadata_from_env(data_name)
+
+print("Attributes:\t" + repr(attrs))
diff --git a/scripts/python/examples/read_ascii_point.py b/scripts/python/examples/read_ascii_point.py
new file mode 100644
index 0000000000..bc20a5beeb
--- /dev/null
+++ b/scripts/python/examples/read_ascii_point.py
@@ -0,0 +1,65 @@
+import pandas as pd
+import os
+import sys
+
+sys.path.append(os.path.abspath(os.path.dirname(__file__)))  # for standalone
+from met_point_obs import convert_point_data
+
+########################################################################
+
+print("Python Script:\t" + repr(sys.argv[0]))
+
+##
+##  input file specified on the command line
+##  load the data into the numpy array
+##
+
+if len(sys.argv) < 2:
+    print("ERROR: read_ascii_point.py -> Missing an input file.")
+    sys.exit(1)
+
+do_convert_to_met_point_data = False
+last_index = 2
+if len(sys.argv) > last_index:
+    if sys.argv[2].lower() == "do_convert" or sys.argv[2].lower() == "convert":
+        do_convert_to_met_point_data = True
+        last_index += 1
+
+if last_index < len(sys.argv):
+    print(" INFO: read_ascii_point.py -> Too many arguments, ignoring {a}.".format(
+        a=' '.join(sys.argv[last_index:])))
+
+# Read the input file as the first argument
+input_file = os.path.expandvars(sys.argv[1])
+try:
+    print("Input File:\t" + repr(input_file))
+
+    # Read and format the input 11-column observations:
+    #   (1)  string:  Message_Type
+    #   (2)  string:  Station_ID
+    #   (3)  string:  Valid_Time(YYYYMMDD_HHMMSS)
+    #   (4)  numeric: Lat(Deg North)
+    #   (5)  numeric: Lon(Deg East)
+    #   (6)  numeric: Elevation(msl)
+    #   (7)  string:  Var_Name(or GRIB_Code)
+    #   (8)  numeric: Level
+    #   (9)  numeric: Height(msl or agl)
+    #   (10) string:  QC_String
+    #   (11) numeric: Observation_Value
+
+    point_data = pd.read_csv(input_file, header=None, delim_whitespace=True, keep_default_na=False,
+                             names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'],
+                             dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist()
+    print(" point_data: Data Length:\t" + repr(len(point_data)))
+    print(" point_data: Data Type:\t" + repr(type(point_data)))
+    if do_convert_to_met_point_data:
+        met_point_data = convert_point_data(point_data)
+        print(" met_point_data: Data Type:\t" + repr(type(met_point_data)))
+except FileNotFoundError:
+    print(f"The input file {input_file} does not exist")
+    sys.exit(1)
+except NameError:
+    print(f"Can't find the input file {input_file}")
+    sys.exit(1)
+
+########################################################################
diff --git a/scripts/python/examples/read_ascii_xarray.py b/scripts/python/examples/read_ascii_xarray.py
new file mode 100644
index 0000000000..bc2fc6b589
--- /dev/null
+++ b/scripts/python/examples/read_ascii_xarray.py
@@ -0,0 +1,50 @@
+import os
+import sys
+import xarray as xr
+from met.dataplane import load_txt, get_grid_metadata
+
+###########################################
+
+print("Python Script:\t" + repr(sys.argv[0]))
+
+   ##
+   ##  input file specified on the command line
+   ##  load the data into the numpy array
+   ##
+
+if len(sys.argv) != 3:
+    print("ERROR: read_ascii_xarray.py -> Must specify exactly one input file and a name for the data.")
+    sys.exit(1)
+
+# Read the input file as the first argument
+input_file = os.path.expandvars(sys.argv[1])
+data_name = sys.argv[2]
+
+met_data = load_txt(input_file, data_name)
+
+###########################################
+
+   ##
+   ##  create the metadata dictionary
+   ##
+
+attrs = get_grid_metadata(data_name)
+
+print("Attributes:\t" + repr(attrs))
+
+# Create an xarray DataArray object
+da = xr.DataArray(met_data)
+ds = xr.Dataset({"fcst":da})
+
+# Add the attributes to the dataarray object
+ds.attrs = attrs
+
+# Delete the local variable attrs to mimic the real world,
+# where a user will rely on da.attrs rather than construct it themselves
+del attrs
+
+# Delete the met_data variable, and reset it to be the Xarray object
+del met_data
+
+# Create met_data and specify attrs because XR doesn't persist them.
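+# (xarray operations generally drop user-defined attrs unless told otherwise,
+# so the attributes are reattached explicitly from ds.attrs below)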
+met_data = xr.DataArray(ds.fcst, attrs=ds.attrs)

From 33c8194f8f996770298f3f736b5871edda07bbb1 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Mon, 3 Apr 2023 16:18:23 -0600
Subject: [PATCH 10/81] #2285 Merged from met_point_obs_nc.py

---
 scripts/python/met/point.py | 281 ++++++++++++++++++++++++++++++++++++
 1 file changed, 281 insertions(+)
 mode change 100755 => 100644 scripts/python/met/point.py

diff --git a/scripts/python/met/point.py b/scripts/python/met/point.py
old mode 100755
new mode 100644
index fb108705fd..7de2f2014f
--- a/scripts/python/met/point.py
+++ b/scripts/python/met/point.py
@@ -25,6 +25,7 @@
 import os
 from abc import ABC, abstractmethod
 import numpy as np
+import pandas as pd
 
 COUNT_SHOW = 30
 
@@ -34,6 +35,30 @@ def get_prompt():
 def met_is_python_prefix(user_cmd):
     return user_cmd.startswith(base_met_point_obs.python_prefix)
 
+########################################################################
+
+# Read the input file as the first argument
+def read_ascii_point_obs(input_file, header=None,
+                         delim_whitespace=True, keep_default_na=False):
+    # Read and format the input 11-column observations:
+    #   (1)  string:  Message_Type
+    #   (2)  string:  Station_ID
+    #   (3)  string:  Valid_Time(YYYYMMDD_HHMMSS)
+    #   (4)  numeric: Lat(Deg North)
+    #   (5)  numeric: Lon(Deg East)
+    #   (6)  numeric: Elevation(msl)
+    #   (7)  string:  Var_Name(or GRIB_Code)
+    #   (8)  numeric: Level
+    #   (9)  numeric: Height(msl or agl)
+    #   (10) string:  QC_String
+    #   (11) numeric: Observation_Value
+    ascii_point_data = pd.read_csv(input_file, header=header,
+                                   delim_whitespace=delim_whitespace,
+                                   keep_default_na=keep_default_na,
+                                   names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'],
+                                   dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist()
+    return ascii_point_data
+
 
 class base_met_point_obs(object):
     '''
@@ -610,6 +635,248 @@ def is_python_prefix(user_cmd):
         return user_cmd.startswith(base_met_point_obs.python_prefix)
 
 
+# Note: caller should import netCDF4
+# the arguments nc_group (dataset) and nc_var should not be None
+class nc_tools():
+
+    met_missing = -99999999.
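+    # the helpers below take an open netCDF4 Dataset (nc_group) and a variable
+    # name; a missing variable yields an empty list rather than None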
+ + @staticmethod + def get_num_array(nc_group, var_name): + nc_var = nc_group.variables.get(var_name, None) + return [] if nc_var is None else nc_var[:] + + @staticmethod + def get_ncbyte_array_to_str(nc_var): + nc_str_data = nc_var[:] + if nc_var.datatype.name == 'bytes8': + nc_str_data = [ str(s.compressed(),"utf-8") for s in nc_var[:] ] + return nc_str_data + + @staticmethod + def get_string_array(nc_group, var_name): + nc_var = nc_group.variables.get(var_name, None) + return [] if nc_var is None else nc_tools.get_ncbyte_array_to_str(nc_var) + + +class nc_point_obs(met_point_obs): + + # args should be string, list, or dictionary + def get_nc_filename(self, args): + nc_filename = None + if isinstance(args, dict): + nc_filename = args.get('nc_name',None) + elif isinstance(args, list): + nc_filename = args[0] + elif args != ARG_PRINT_DATA: + nc_filename = args + + return nc_filename + + def read_data(self, nc_filename): + if nc_filename is None: + self.log_error_msg("The input NetCDF filename is missing") + elif not os.path.exists(nc_filename): + self.log_error_msg(f"input NetCDF file ({nc_filename}) does not exist") + else: + dataset = nc.Dataset(nc_filename, 'r') + + attr_name = 'use_var_id' + use_var_id_str = dataset.getncattr(attr_name) if attr_name in dataset.ncattrs() else "false" + self.use_var_id = use_var_id_str.lower() == 'true' + + # Header + self.hdr_typ = dataset['hdr_typ'][:] + self.hdr_sid = dataset['hdr_sid'][:] + self.hdr_vld = dataset['hdr_vld'][:] + self.hdr_lat = dataset['hdr_lat'][:] + self.hdr_lon = dataset['hdr_lon'][:] + self.hdr_elv = dataset['hdr_elv'][:] + self.hdr_typ_table = nc_tools.get_string_array(dataset, 'hdr_typ_table') + self.hdr_sid_table = nc_tools.get_string_array(dataset, 'hdr_sid_table') + self.hdr_vld_table = nc_tools.get_string_array(dataset, 'hdr_vld_table') + + nc_var = dataset.variables.get('obs_unit', None) + if nc_var: + self.obs_var_unit = nc_var[:] + nc_var = dataset.variables.get('obs_desc', None) + if nc_var: + self.obs_var_desc = nc_var[:] + + nc_var = dataset.variables.get('hdr_prpt_typ', None) + if nc_var: + self.hdr_prpt_typ = nc_var[:] + nc_var = dataset.variables.get('hdr_irpt_typ', None) + if nc_var: + self.hdr_irpt_typ = nc_var[:] + nc_var = dataset.variables.get('hdr_inst_typ', None) + if nc_var: + self.hdr_inst_typ =nc_var[:] + + #Observation data + self.hdr_sid = dataset['hdr_sid'][:] + self.obs_qty = np.array(dataset['obs_qty'][:]) + self.obs_hid = np.array(dataset['obs_hid'][:]) + self.obs_lvl = np.array(dataset['obs_lvl'][:]) + self.obs_hgt = np.array(dataset['obs_hgt'][:]) + self.obs_val = np.array(dataset['obs_val'][:]) + nc_var = dataset.variables.get('obs_vid', None) + if nc_var is None: + self.use_var_id = False + nc_var = dataset.variables.get('obs_gc', None) + else: + self.obs_var_table = nc_tools.get_string_array(dataset, 'obs_var') + if nc_var: + self.obs_vid = np.array(nc_var[:]) + + self.obs_qty_table = nc_tools.get_string_array(dataset, 'obs_qty_table') + + def save_ncfile(self, nc_filename): + met_data = self.get_point_data() + with nc.Dataset(nc_filename, 'w') as nc_dataset: + self.set_nc_data(nc_dataset) + return met_data + + def set_nc_data(self, nc_dataset): + return nc_point_obs.write_nc_data(nc_dataset, self) + + @staticmethod + def write_nc_file(nc_filename, point_obs): + with nc.Dataset(nc_filename, 'w') as nc_dataset: + nc_point_obs.set_nc_data(nc_dataset, point_obs) + + @staticmethod + def write_nc_data(nc_dataset, point_obs): + do_nothing = False + if 0 == point_obs.nhdr: + do_nothing = True + 
base_met_point_obs.info_msg("the header is empty") + if 0 == point_obs.nobs: + do_nothing = True + base_met_point_obs.info_msg("the observation data is empty") + if do_nothing: + print() + return + + # Set global attributes + nc_dataset.MET_Obs_version = "1.02" ; + nc_dataset.use_var_id = "true" if point_obs.use_var_id else "false" + + # Create dimensions + nc_dataset.createDimension('mxstr', 16) + nc_dataset.createDimension('mxstr2', 40) + nc_dataset.createDimension('mxstr3', 80) + nc_dataset.createDimension('nhdr', point_obs.nhdr) + nc_dataset.createDimension('nobs', point_obs.nobs) + #npbhdr = len(point_obs.hdr_prpt_typ) + if 0 < point_obs.npbhdr: + nc_dataset.createDimension('npbhdr', point_obs.npbhdr) + nc_dataset.createDimension('nhdr_typ', point_obs.nhdr_typ) + nc_dataset.createDimension('nhdr_sid', point_obs.nhdr_sid) + nc_dataset.createDimension('nhdr_vld', point_obs.nhdr_vld) + nc_dataset.createDimension('nobs_qty', point_obs.nobs_qty) + nc_dataset.createDimension('obs_var_num', point_obs.nobs_var) + + type_for_string = 'S1' # np.byte + dims_hdr = ('nhdr',) + dims_obs = ('nobs',) + + # Create header and observation variables + var_hdr_typ = nc_dataset.createVariable('hdr_typ', np.int32, dims_hdr, fill_value=-9999) + var_hdr_sid = nc_dataset.createVariable('hdr_sid', np.int32, dims_hdr, fill_value=-9999) + var_hdr_vld = nc_dataset.createVariable('hdr_vld', np.int32, dims_hdr, fill_value=-9999) + var_hdr_lat = nc_dataset.createVariable('hdr_lat', np.float32, dims_hdr, fill_value=-9999.) + var_hdr_lon = nc_dataset.createVariable('hdr_lon', np.float32, dims_hdr, fill_value=-9999.) + var_hdr_elv = nc_dataset.createVariable('hdr_elv', np.float32, dims_hdr, fill_value=-9999.) + + var_obs_qty = nc_dataset.createVariable('obs_qty', np.int32, dims_obs, fill_value=-9999) + var_obs_hid = nc_dataset.createVariable('obs_hid', np.int32, dims_obs, fill_value=-9999) + var_obs_vid = nc_dataset.createVariable('obs_vid', np.int32, dims_obs, fill_value=-9999) + var_obs_lvl = nc_dataset.createVariable('obs_lvl', np.float32, dims_obs, fill_value=-9999.) + var_obs_hgt = nc_dataset.createVariable('obs_hgt', np.float32, dims_obs, fill_value=-9999.) + var_obs_val = nc_dataset.createVariable('obs_val', np.float32, dims_obs, fill_value=-9999.) + + if 0 == point_obs.npbhdr: + var_hdr_prpt_typ = None + var_hdr_irpt_typ = None + var_hdr_inst_typ = None + else: + dims_npbhdr = ('npbhdr',) + var_hdr_prpt_typ = nc_dataset.createVariable('hdr_prpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) + var_hdr_irpt_typ = nc_dataset.createVariable('hdr_irpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) + var_hdr_inst_typ = nc_dataset.createVariable('hdr_inst_typ', np.int32, dims_npbhdr, fill_value=-9999.) 
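+        # the string lookup tables below are stored as fixed-width char
+        # arrays ('S1') sized by the mxstr/mxstr2/mxstr3 dimensions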
+ + var_hdr_typ_table = nc_dataset.createVariable('hdr_typ_table', type_for_string, ('nhdr_typ','mxstr2')) + var_hdr_sid_table = nc_dataset.createVariable('hdr_sid_table', type_for_string, ('nhdr_sid','mxstr2')) + var_hdr_vld_table = nc_dataset.createVariable('hdr_vld_table', type_for_string, ('nhdr_vld','mxstr')) + var_obs_qty_table = nc_dataset.createVariable('obs_qty_table', type_for_string, ('nobs_qty','mxstr')) + var_obs_var_table = nc_dataset.createVariable('obs_var', type_for_string, ('obs_var_num','mxstr2')) + var_obs_var_unit = nc_dataset.createVariable('obs_unit', type_for_string, ('obs_var_num','mxstr2')) + var_obs_var_desc = nc_dataset.createVariable('obs_desc', type_for_string, ('obs_var_num','mxstr3')) + + # Set variables + var_hdr_typ[:] = point_obs.hdr_typ[:] + var_hdr_sid[:] = point_obs.hdr_sid[:] + var_hdr_vld[:] = point_obs.hdr_vld[:] + var_hdr_lat[:] = point_obs.hdr_lat[:] + var_hdr_lon[:] = point_obs.hdr_lon[:] + var_hdr_elv[:] = point_obs.hdr_elv[:] + for i in range(0, point_obs.nhdr_typ): + for j in range(0, len(point_obs.hdr_typ_table[i])): + var_hdr_typ_table[i,j] = point_obs.hdr_typ_table[i][j] + for i in range(0, point_obs.nhdr_sid): + for j in range(0, len(point_obs.hdr_sid_table[i])): + var_hdr_sid_table[i,j] = point_obs.hdr_sid_table[i][j] + for i in range(0, point_obs.nhdr_vld): + for j in range(0, len(point_obs.hdr_vld_table[i])): + var_hdr_vld_table[i,j] = point_obs.hdr_vld_table[i][j] + if 0 < point_obs.npbhdr: + var_hdr_prpt_typ[:] = point_obs.hdr_prpt_typ[:] + var_hdr_irpt_typ[:] = point_obs.hdr_irpt_typ[:] + var_hdr_inst_typ[:] = point_obs.hdr_inst_typ[:] + + var_obs_qty[:] = point_obs.obs_qty[:] + var_obs_hid[:] = point_obs.obs_hid[:] + var_obs_vid[:] = point_obs.obs_vid[:] + var_obs_lvl[:] = point_obs.obs_lvl[:] + var_obs_hgt[:] = point_obs.obs_hgt[:] + var_obs_val[:] = point_obs.obs_val[:] + for i in range(0, point_obs.nobs_var): + for j in range(0, len(point_obs.obs_var_table[i])): + var_obs_var_table[i,j] = point_obs.obs_var_table[i][j] + var_obs_var_unit[i] = "" if i >= len(point_obs.obs_var_unit) else point_obs.obs_var_unit[i] + var_obs_var_desc[i] = "" if i >= len(point_obs.obs_var_desc) else point_obs.obs_var_desc[i] + for i in range(0, point_obs.nobs_qty): + for j in range(0, len(point_obs.obs_qty_table[i])): + var_obs_qty_table[i,j] = point_obs.obs_qty_table[i][j] + + # Set variable attributes + var_hdr_typ.long_name = "index of message type" + var_hdr_sid.long_name = "index of station identification" + var_hdr_vld.long_name = "index of valid time" + var_hdr_lat.long_name = "latitude" + var_hdr_lat.units = "degrees_north" + var_hdr_lon.long_name = "longitude" + var_hdr_lon.units = "degrees_east" + var_hdr_elv.long_name = "elevation" + var_hdr_elv.units = "meters above sea level (msl)" + + var_obs_qty.long_name = "index of quality flag" + var_obs_hid.long_name = "index of matching header data" + var_obs_vid.long_name = "index of BUFR variable corresponding to the observation type" + var_obs_lvl.long_name = "pressure level (hPa) or accumulation interval (sec)" + var_obs_hgt.long_name = "height in meters above sea level (msl)" + var_obs_val.long_name = "observation value" + var_hdr_typ_table.long_name = "message type" + var_hdr_sid_table.long_name = "station identification" + var_hdr_vld_table.long_name = "valid time" + var_hdr_vld_table.units = "YYYYMMDD_HHMMSS UTC" + var_obs_qty_table.long_name = "quality flag" + var_obs_var_table.long_name = "variable names" + var_obs_var_unit.long_name = "variable units" + var_obs_var_desc.long_name = 
"variable descriptions" + + # This is a sample drived class class sample_met_point_obs(met_point_obs): @@ -655,6 +922,20 @@ def main(): point_obs_data.print_point_data(met_point_data, print_subset=False) +def main_nc(argv): + if len(argv) != 1 and argv[1] != ARG_PRINT_DATA: + netcdf_filename = argv[1] + tmp_nc_name = 'tmp_met_point.nc' + point_obs_data = nc_point_obs() + point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename)) + met_point_data = point_obs_data.save_ncfile(tmp_nc_name) + print(f'{get_prompt()} saved met_point_data to {tmp_nc_name}') + met_point_data['met_point_data'] = point_obs_data + + if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]: + met_point_obs.print_point_data(met_point_data) + + if __name__ == '__main__': main() print('Done python scripot') From c565ade1f692b7c80b0599b9395973657188378c Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 3 Apr 2023 16:22:10 -0600 Subject: [PATCH 11/81] #2285 Initial release --- scripts/python/examples/Makefile.am | 33 ++ scripts/python/examples/Makefile.in | 521 ++++++++++++++++++++++++++++ 2 files changed, 554 insertions(+) create mode 100644 scripts/python/examples/Makefile.am create mode 100644 scripts/python/examples/Makefile.in diff --git a/scripts/python/examples/Makefile.am b/scripts/python/examples/Makefile.am new file mode 100644 index 0000000000..33e77c59e9 --- /dev/null +++ b/scripts/python/examples/Makefile.am @@ -0,0 +1,33 @@ +## Makefile.am -- Process this file with automake to produce Makefile.in +## Copyright (C) 2000, 2006 Gary V. Vaughan +## +## This program is free software; you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation; either version 2, or (at your option) +## any later version. +## +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. +## +## You should have received a copy of the GNU General Public License +## along with this program; if not, write to the Free Software +## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, +## MA 02110-1301, USA. + +# SUBDIRS = include + + +## Example of how to Install outside of $(pkgdatadir) +## scriptsrootdir = $(prefix)/share/scripts +## pythonscriptsdir = ${scriptsrootdir}/python + +pythonexamplesdir = $(pkgdatadir)/python/examples + +pythonexamples_DATA = \ + read_met_point_obs.py + +EXTRA_DIST = ${pythonexamples_DATA} + +MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/examples/Makefile.in b/scripts/python/examples/Makefile.in new file mode 100644 index 0000000000..fe45d0ef29 --- /dev/null +++ b/scripts/python/examples/Makefile.in @@ -0,0 +1,521 @@ +# Makefile.in generated by automake 1.16.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2018 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. 
+ +@SET_MAKE@ + +# SUBDIRS = include + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +subdir = scripts/python/examples +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ 
+ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(pythonexamplesdir)" +DATA = $(pythonexamples_DATA) +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +am__DIST_COMMON = $(srcdir)/Makefile.in +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUFRLIB_NAME = @BUFRLIB_NAME@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FC_LIBS = @FC_LIBS@ +FFLAGS = @FFLAGS@ +FLIBS = @FLIBS@ +GREP = @GREP@ +GRIB2CLIB_NAME = @GRIB2CLIB_NAME@ +GRIB2_LIBS = @GRIB2_LIBS@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LEX = @LEX@ +LEXLIB = @LEXLIB@ +LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MET_BUFR = @MET_BUFR@ +MET_BUFRLIB = @MET_BUFRLIB@ +MET_CAIRO = @MET_CAIRO@ +MET_CAIROINC = @MET_CAIROINC@ +MET_CAIROLIB = @MET_CAIROLIB@ +MET_FREETYPE = @MET_FREETYPE@ +MET_FREETYPEINC = @MET_FREETYPEINC@ +MET_FREETYPELIB = @MET_FREETYPELIB@ +MET_GRIB2C = @MET_GRIB2C@ +MET_GRIB2CINC = @MET_GRIB2CINC@ +MET_GRIB2CLIB = @MET_GRIB2CLIB@ +MET_GSL = @MET_GSL@ +MET_GSLINC = @MET_GSLINC@ +MET_GSLLIB = @MET_GSLLIB@ +MET_HDF = @MET_HDF@ +MET_HDF5 = @MET_HDF5@ +MET_HDF5INC = @MET_HDF5INC@ +MET_HDF5LIB = @MET_HDF5LIB@ +MET_HDFEOS = @MET_HDFEOS@ +MET_HDFEOSINC = @MET_HDFEOSINC@ +MET_HDFEOSLIB = @MET_HDFEOSLIB@ +MET_HDFINC = @MET_HDFINC@ +MET_HDFLIB = @MET_HDFLIB@ +MET_NETCDF = @MET_NETCDF@ +MET_NETCDFINC = @MET_NETCDFINC@ +MET_NETCDFLIB = @MET_NETCDFLIB@ +MET_PYTHON_BIN_EXE = @MET_PYTHON_BIN_EXE@ +MET_PYTHON_CC = @MET_PYTHON_CC@ +MET_PYTHON_LD = @MET_PYTHON_LD@ +MKDIR_P = @MKDIR_P@ +OBJEXT = @OBJEXT@ +OPENMP_CFLAGS = @OPENMP_CFLAGS@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PYTHON_LIBS = @PYTHON_LIBS@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +YACC = @YACC@ +YFLAGS = @YFLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = 
@am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +pythonexamplesdir = $(pkgdatadir)/python/examples +pythonexamples_DATA = \ + read_met_point_obs.py + +EXTRA_DIST = ${pythonexamples_DATA} +MAINTAINERCLEANFILES = Makefile.in +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/examples/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign scripts/python/examples/Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' 
in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +install-pythonexamplesDATA: $(pythonexamples_DATA) + @$(NORMAL_INSTALL) + @list='$(pythonexamples_DATA)'; test -n "$(pythonexamplesdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pythonexamplesdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pythonexamplesdir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonexamplesdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonexamplesdir)" || exit $$?; \ + done + +uninstall-pythonexamplesDATA: + @$(NORMAL_UNINSTALL) + @list='$(pythonexamples_DATA)'; test -n "$(pythonexamplesdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(pythonexamplesdir)'; $(am__uninstall_files_from_dir) +tags TAGS: + +ctags CTAGS: + +cscope cscopelist: + + +distdir: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) distdir-am + +distdir-am: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(DATA) +installdirs: + for dir in "$(DESTDIR)$(pythonexamplesdir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." + -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) +clean: clean-am + +clean-am: clean-generic mostlyclean-am + +distclean: distclean-am + -rm -f Makefile +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: install-pythonexamplesDATA + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-pythonexamplesDATA + +.MAKE: install-am install-strip + +.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ + ctags-am distclean distclean-generic distdir dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-pythonexamplesDATA \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ + uninstall-am uninstall-pythonexamplesDATA + +.PRECIOUS: Makefile + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. 
+.NOEXPORT: From e081ec77f7f6702b0594a4b420f003f8c1946a26 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 3 Apr 2023 16:22:52 -0600 Subject: [PATCH 12/81] #2285 Initial release --- scripts/python/met/Makefile.am | 34 +++ scripts/python/met/Makefile.in | 523 ++++++++++++++++++++++++++++++++ scripts/python/met/dataplane.py | 80 +++++ scripts/python/met/mprbase.py | 16 + 4 files changed, 653 insertions(+) create mode 100644 scripts/python/met/Makefile.am create mode 100644 scripts/python/met/Makefile.in create mode 100644 scripts/python/met/dataplane.py create mode 100644 scripts/python/met/mprbase.py diff --git a/scripts/python/met/Makefile.am b/scripts/python/met/Makefile.am new file mode 100644 index 0000000000..4d02474315 --- /dev/null +++ b/scripts/python/met/Makefile.am @@ -0,0 +1,34 @@ +## Makefile.am -- Process this file with automake to produce Makefile.in +## Copyright (C) 2000, 2006 Gary V. Vaughan +## +## This program is free software; you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation; either version 2, or (at your option) +## any later version. +## +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. +## +## You should have received a copy of the GNU General Public License +## along with this program; if not, write to the Free Software +## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, +## MA 02110-1301, USA. + +# SUBDIRS = include + +## Example of how to Install outside of $(pkgdatadir) +## scriptsrootdir = $(prefix)/share/scripts +## pythonscriptsdir = ${scriptsrootdir}/python + +pythonmetscriptsdir = $(pkgdatadir)/python/met + +pythonmetscripts_DATA = \ + mprbase.py \ + dataplane.py \ + point.py + +EXTRA_DIST = ${pythonmetscripts_DATA} + +MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/met/Makefile.in b/scripts/python/met/Makefile.in new file mode 100644 index 0000000000..f0fa53e57b --- /dev/null +++ b/scripts/python/met/Makefile.in @@ -0,0 +1,523 @@ +# Makefile.in generated by automake 1.16.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2018 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +# SUBDIRS = include + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) 
;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +subdir = scripts/python/met +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed 
'$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(pythonmetscriptsdir)" +DATA = $(pythonmetscripts_DATA) +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +am__DIST_COMMON = $(srcdir)/Makefile.in +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUFRLIB_NAME = @BUFRLIB_NAME@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FC_LIBS = @FC_LIBS@ +FFLAGS = @FFLAGS@ +FLIBS = @FLIBS@ +GREP = @GREP@ +GRIB2CLIB_NAME = @GRIB2CLIB_NAME@ +GRIB2_LIBS = @GRIB2_LIBS@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LEX = @LEX@ +LEXLIB = @LEXLIB@ +LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MET_BUFR = @MET_BUFR@ +MET_BUFRLIB = @MET_BUFRLIB@ +MET_CAIRO = @MET_CAIRO@ +MET_CAIROINC = @MET_CAIROINC@ +MET_CAIROLIB = @MET_CAIROLIB@ +MET_FREETYPE = @MET_FREETYPE@ +MET_FREETYPEINC = @MET_FREETYPEINC@ +MET_FREETYPELIB = @MET_FREETYPELIB@ +MET_GRIB2C = @MET_GRIB2C@ +MET_GRIB2CINC = @MET_GRIB2CINC@ +MET_GRIB2CLIB = @MET_GRIB2CLIB@ +MET_GSL = @MET_GSL@ +MET_GSLINC = @MET_GSLINC@ +MET_GSLLIB = @MET_GSLLIB@ +MET_HDF = @MET_HDF@ +MET_HDF5 = @MET_HDF5@ +MET_HDF5INC = @MET_HDF5INC@ +MET_HDF5LIB = @MET_HDF5LIB@ +MET_HDFEOS = @MET_HDFEOS@ +MET_HDFEOSINC = @MET_HDFEOSINC@ +MET_HDFEOSLIB = @MET_HDFEOSLIB@ +MET_HDFINC = @MET_HDFINC@ +MET_HDFLIB = @MET_HDFLIB@ +MET_NETCDF = @MET_NETCDF@ +MET_NETCDFINC = @MET_NETCDFINC@ +MET_NETCDFLIB = @MET_NETCDFLIB@ +MET_PYTHON_BIN_EXE = @MET_PYTHON_BIN_EXE@ +MET_PYTHON_CC = @MET_PYTHON_CC@ +MET_PYTHON_LD = @MET_PYTHON_LD@ +MKDIR_P = @MKDIR_P@ +OBJEXT = @OBJEXT@ +OPENMP_CFLAGS = @OPENMP_CFLAGS@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PYTHON_LIBS = @PYTHON_LIBS@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +YACC = @YACC@ +YFLAGS = @YFLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = 
@exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +pythonmetscriptsdir = $(pkgdatadir)/python/met +pythonmetscripts_DATA = \ + mprbase.py \ + dataplane.py \ + point.py + +EXTRA_DIST = ${pythonmetscripts_DATA} +MAINTAINERCLEANFILES = Makefile.in +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/met/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign scripts/python/met/Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +install-pythonmetscriptsDATA: $(pythonmetscripts_DATA) + @$(NORMAL_INSTALL) + @list='$(pythonmetscripts_DATA)'; test -n "$(pythonmetscriptsdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pythonmetscriptsdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pythonmetscriptsdir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonmetscriptsdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonmetscriptsdir)" || exit $$?; \ + done + +uninstall-pythonmetscriptsDATA: + @$(NORMAL_UNINSTALL) + @list='$(pythonmetscripts_DATA)'; test -n "$(pythonmetscriptsdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(pythonmetscriptsdir)'; $(am__uninstall_files_from_dir) +tags TAGS: + +ctags CTAGS: + +cscope cscopelist: + + +distdir: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) distdir-am + +distdir-am: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e 
"s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(DATA) +installdirs: + for dir in "$(DESTDIR)$(pythonmetscriptsdir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." 
+ -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) +clean: clean-am + +clean-am: clean-generic mostlyclean-am + +distclean: distclean-am + -rm -f Makefile +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: install-pythonmetscriptsDATA + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-pythonmetscriptsDATA + +.MAKE: install-am install-strip + +.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ + ctags-am distclean distclean-generic distdir dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-pythonmetscriptsDATA \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ + uninstall-am uninstall-pythonmetscriptsDATA + +.PRECIOUS: Makefile + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. 
+.NOEXPORT:
diff --git a/scripts/python/met/dataplane.py b/scripts/python/met/dataplane.py
new file mode 100644
index 0000000000..b036a25c85
--- /dev/null
+++ b/scripts/python/met/dataplane.py
@@ -0,0 +1,80 @@
+import numpy as np
+import os
+
+###########################################
+
+def load_txt(input_file, data_name):
+    try:
+        print("Input File:\t" + repr(input_file))
+        print("Data Name:\t" + repr(data_name))
+        met_data = np.loadtxt(input_file)
+        print("Data Shape:\t" + repr(met_data.shape))
+        print("Data Type:\t" + repr(met_data.dtype))
+    except NameError:
+        met_data = None
+        print("Can't find the input file")
+    return met_data
+
+##
+##  create the metadata dictionary
+##
+
+def get_grid_metadata(data_name):
+    attrs = {
+
+        'valid': '20050807_120000',
+        'init': '20050807_000000',
+        'lead': '120000',
+        'accum': '120000',
+
+        'name': data_name,
+        'long_name': data_name + '_word',
+        'level': 'Surface',
+        'units': 'None',
+
+        'grid': {
+            'type': 'Lambert Conformal',
+            'hemisphere': 'N',
+
+            'name': 'FooGrid',
+
+            'scale_lat_1': 25.0,
+            'scale_lat_2': 25.0,
+
+            'lat_pin': 12.19,
+            'lon_pin': -135.459,
+
+            'x_pin': 0.0,
+            'y_pin': 0.0,
+
+            'lon_orient': -95.0,
+
+            'd_km': 40.635,
+            'r_km': 6371.2,
+
+            'nx': 185,
+            'ny': 129,
+        }
+
+    }
+    return attrs
+
+##
+##  create the metadata dictionary from the environment variable PYTHON_GRID
+##
+
+def get_grid_metadata_from_env(data_name, grid_env_name='PYTHON_GRID'):
+    attrs = {
+
+        'valid': '20050807_120000',
+        'init': '20050807_000000',
+        'lead': '120000',
+        'accum': '120000',
+
+        'name': data_name,
+        'long_name': data_name + '_word',
+        'level': 'Surface',
+        'units': 'None',
+        'grid': os.path.expandvars(os.getenv(grid_env_name))
+    }
+    return attrs
diff --git a/scripts/python/met/mprbase.py b/scripts/python/met/mprbase.py
new file mode 100644
index 0000000000..9ddc20c294
--- /dev/null
+++ b/scripts/python/met/mprbase.py
@@ -0,0 +1,16 @@
+import pandas as pd
+
+########################################################################
+
+def read_mpr(input_file, usecols=range(1,37), header=None,
+             delim_whitespace=True, keep_default_na=False,
+             skiprows=1, dtype=str):
+    mpr_data = pd.read_csv(input_file, header=header,
+                           delim_whitespace=delim_whitespace,
+                           keep_default_na=keep_default_na,
+                           skiprows=skiprows, usecols=usecols,
+                           dtype=dtype).values.tolist()
+    return mpr_data
+
+
+########################################################################

From 9c84de7363f114e39aff318be444e2d59242353f Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Mon, 3 Apr 2023 16:50:57 -0600
Subject: [PATCH 13/81] #2285 Moved read_met_point_obs.py

Initial release

---
 scripts/python/{ => examples}/read_met_point_obs.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename scripts/python/{ => examples}/read_met_point_obs.py (100%)

diff --git a/scripts/python/read_met_point_obs.py b/scripts/python/examples/read_met_point_obs.py
similarity index 100%
rename from scripts/python/read_met_point_obs.py
rename to scripts/python/examples/read_met_point_obs.py
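The two modules added above are meant to be driven through MET's Python embedding, which imports a user script and pulls specific variables out of its namespace. The sketch below shows one plausible driver script. It is not part of this patch series: the MET_BASE path handling and the variable names met_data, attrs, and mpr_data are assumptions modeled on the example scripts installed under scripts/python/examples.

# read_ascii_sketch.py -- hypothetical driver, not part of this patch series.
# Exposes the variables MET's Python embedding conventionally looks for.
import os
import sys

# Assumption: the installed helpers live under $MET_BASE/python/met.
sys.path.append(os.path.join(os.environ.get('MET_BASE', '.'), 'python', 'met'))

from dataplane import load_txt, get_grid_metadata
from mprbase import read_mpr

if len(sys.argv) != 3:
    sys.exit("usage: read_ascii_sketch.py <input_file> <data_name>")

input_file, data_name = sys.argv[1], sys.argv[2]

# Gridded case: the embedding reads a 2D numpy array named 'met_data'
# and a grid/metadata dictionary named 'attrs' from this namespace.
met_data = load_txt(input_file, data_name)
attrs = get_grid_metadata(data_name)

# MPR case (alternative): the embedding reads a list of string records
# named 'mpr_data' instead.
# mpr_data = read_mpr(input_file)

Note the design choices in read_mpr: dtype=str keeps every field as text so MET can do its own parsing, while skiprows=1 and usecols=range(1,37) drop the header row and the leading column (presumably VERSION), leaving 36 columns per record.

From a8240854e6d07587d237bb688c08ba1111f140cd Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Tue, 4 Apr 2023 11:10:12 -0600
Subject: [PATCH 14/81] #2285 Restructure python scripts

---
 Makefile.am | 2 +-
 Makefile.in | 5 +-
 configure | 8 +-
 configure.ac | 6 +-
 scripts/python/Makefile.am | 22 +-
 scripts/python/Makefile.in | 335 +++++++++++++++++++---------
 scripts/python/examples/Makefile.am | 6 +
 scripts/python/examples/Makefile.in | 6 +
 scripts/python/pyembed/Makefile.am | 8 +-
 scripts/python/pyembed/Makefile.in |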
46 ++-- scripts/python/utility/Makefile.am | 2 +- scripts/python/utility/Makefile.in | 8 +- 12 files changed, 296 insertions(+), 158 deletions(-) diff --git a/Makefile.am b/Makefile.am index f3c2b7a83a..a1e75367dd 100644 --- a/Makefile.am +++ b/Makefile.am @@ -18,7 +18,7 @@ ACLOCAL_AMFLAGS = -I m4 -SUBDIRS = data src scripts/Rscripts scripts/python scripts/utility +SUBDIRS = data src scripts/Rscripts scripts/python if ENABLE_DEVELOPMENT SUBDIRS += internal/test_util diff --git a/Makefile.in b/Makefile.in index 8236187480..aa3a1d74ff 100644 --- a/Makefile.in +++ b/Makefile.in @@ -158,7 +158,7 @@ ETAGS = etags CTAGS = ctags CSCOPE = cscope DIST_SUBDIRS = data src scripts/Rscripts scripts/python \ - scripts/utility internal/test_util + internal/test_util am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/config.h.in INSTALL \ compile config.guess config.sub depcomp install-sh missing \ ylwrap @@ -350,8 +350,7 @@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ ACLOCAL_AMFLAGS = -I m4 -SUBDIRS = data src scripts/Rscripts scripts/python scripts/utility \ - $(am__append_1) +SUBDIRS = data src scripts/Rscripts scripts/python $(am__append_1) MAINTAINERCLEANFILES = \ Makefile.in \ aclocal.m4 \ diff --git a/configure b/configure index d47929b214..87899cbbeb 100755 --- a/configure +++ b/configure @@ -8874,7 +8874,7 @@ done # Create configured files -ac_config_files="$ac_config_files Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile scripts/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile data/colortables/Makefile data/colortables/NCL_colortables/Makefile data/config/Makefile data/map/Makefile data/map/admin_by_country/Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile data/wrappers/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile src/Makefile src/basic/Makefile src/basic/enum_to_string/Makefile src/basic/vx_cal/Makefile src/basic/vx_config/Makefile src/basic/vx_log/Makefile src/basic/vx_math/Makefile src/basic/vx_util/Makefile src/basic/vx_util_math/Makefile src/libcode/Makefile src/libcode/vx_afm/Makefile src/libcode/vx_analysis_util/Makefile src/libcode/vx_color/Makefile src/libcode/vx_data2d/Makefile src/libcode/vx_data2d_factory/Makefile src/libcode/vx_data2d_grib/Makefile src/libcode/vx_data2d_grib2/Makefile src/libcode/vx_data2d_nc_met/Makefile src/libcode/vx_data2d_nc_pinterp/Makefile src/libcode/vx_data2d_nccf/Makefile src/libcode/vx_geodesy/Makefile src/libcode/vx_gis/Makefile src/libcode/vx_gnomon/Makefile src/libcode/vx_grid/Makefile src/libcode/vx_gsl_prob/Makefile src/libcode/vx_nav/Makefile src/libcode/vx_solar/Makefile src/libcode/vx_nc_obs/Makefile src/libcode/vx_nc_util/Makefile src/libcode/vx_pb_util/Makefile src/libcode/vx_plot_util/Makefile src/libcode/vx_ps/Makefile src/libcode/vx_pxm/Makefile src/libcode/vx_render/Makefile src/libcode/vx_shapedata/Makefile src/libcode/vx_stat_out/Makefile src/libcode/vx_statistics/Makefile src/libcode/vx_time_series/Makefile src/libcode/vx_physics/Makefile src/libcode/vx_series_data/Makefile src/libcode/vx_regrid/Makefile src/libcode/vx_tc_util/Makefile src/libcode/vx_summary/Makefile src/libcode/vx_python3_utils/Makefile src/libcode/vx_data2d_python/Makefile src/libcode/vx_bool_calc/Makefile src/libcode/vx_pointdata_python/Makefile src/libcode/vx_seeps/Makefile src/tools/Makefile src/tools/core/Makefile src/tools/core/ensemble_stat/Makefile 
src/tools/core/grid_stat/Makefile src/tools/core/mode/Makefile src/tools/core/mode_analysis/Makefile src/tools/core/pcp_combine/Makefile src/tools/core/point_stat/Makefile src/tools/core/series_analysis/Makefile src/tools/core/stat_analysis/Makefile src/tools/core/wavelet_stat/Makefile src/tools/other/Makefile src/tools/other/ascii2nc/Makefile src/tools/other/lidar2nc/Makefile src/tools/other/gen_ens_prod/Makefile src/tools/other/gen_vx_mask/Makefile src/tools/other/gis_utils/Makefile src/tools/other/ioda2nc/Makefile src/tools/other/madis2nc/Makefile src/tools/other/mode_graphics/Makefile src/tools/other/modis_regrid/Makefile src/tools/other/pb2nc/Makefile src/tools/other/plot_data_plane/Makefile src/tools/other/plot_point_obs/Makefile src/tools/other/wwmca_tool/Makefile src/tools/other/gsi_tools/Makefile src/tools/other/regrid_data_plane/Makefile src/tools/other/point2grid/Makefile src/tools/other/shift_data_plane/Makefile src/tools/other/mode_time_domain/Makefile src/tools/other/grid_diag/Makefile src/tools/tc_utils/Makefile src/tools/tc_utils/tc_dland/Makefile src/tools/tc_utils/tc_pairs/Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile src/tools/tc_utils/tc_rmw/Makefile" +ac_config_files="$ac_config_files Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile scripts/python/examples/Makefile scripts/python/met/Makefile scripts/python/pyembed/Makefile scripts/python/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile data/colortables/Makefile data/colortables/NCL_colortables/Makefile data/config/Makefile data/map/Makefile data/map/admin_by_country/Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile src/Makefile src/basic/Makefile src/basic/enum_to_string/Makefile src/basic/vx_cal/Makefile src/basic/vx_config/Makefile src/basic/vx_log/Makefile src/basic/vx_math/Makefile src/basic/vx_util/Makefile src/basic/vx_util_math/Makefile src/libcode/Makefile src/libcode/vx_afm/Makefile src/libcode/vx_analysis_util/Makefile src/libcode/vx_color/Makefile src/libcode/vx_data2d/Makefile src/libcode/vx_data2d_factory/Makefile src/libcode/vx_data2d_grib/Makefile src/libcode/vx_data2d_grib2/Makefile src/libcode/vx_data2d_nc_met/Makefile src/libcode/vx_data2d_nc_pinterp/Makefile src/libcode/vx_data2d_nccf/Makefile src/libcode/vx_geodesy/Makefile src/libcode/vx_gis/Makefile src/libcode/vx_gnomon/Makefile src/libcode/vx_grid/Makefile src/libcode/vx_gsl_prob/Makefile src/libcode/vx_nav/Makefile src/libcode/vx_solar/Makefile src/libcode/vx_nc_obs/Makefile src/libcode/vx_nc_util/Makefile src/libcode/vx_pb_util/Makefile src/libcode/vx_plot_util/Makefile src/libcode/vx_ps/Makefile src/libcode/vx_pxm/Makefile src/libcode/vx_render/Makefile src/libcode/vx_shapedata/Makefile src/libcode/vx_stat_out/Makefile src/libcode/vx_statistics/Makefile src/libcode/vx_time_series/Makefile src/libcode/vx_physics/Makefile src/libcode/vx_series_data/Makefile src/libcode/vx_regrid/Makefile src/libcode/vx_tc_util/Makefile src/libcode/vx_summary/Makefile src/libcode/vx_python3_utils/Makefile src/libcode/vx_data2d_python/Makefile src/libcode/vx_bool_calc/Makefile src/libcode/vx_pointdata_python/Makefile src/libcode/vx_seeps/Makefile src/tools/Makefile src/tools/core/Makefile src/tools/core/ensemble_stat/Makefile src/tools/core/grid_stat/Makefile src/tools/core/mode/Makefile 
src/tools/core/mode_analysis/Makefile src/tools/core/pcp_combine/Makefile src/tools/core/point_stat/Makefile src/tools/core/series_analysis/Makefile src/tools/core/stat_analysis/Makefile src/tools/core/wavelet_stat/Makefile src/tools/other/Makefile src/tools/other/ascii2nc/Makefile src/tools/other/lidar2nc/Makefile src/tools/other/gen_ens_prod/Makefile src/tools/other/gen_vx_mask/Makefile src/tools/other/gis_utils/Makefile src/tools/other/ioda2nc/Makefile src/tools/other/madis2nc/Makefile src/tools/other/mode_graphics/Makefile src/tools/other/modis_regrid/Makefile src/tools/other/pb2nc/Makefile src/tools/other/plot_data_plane/Makefile src/tools/other/plot_point_obs/Makefile src/tools/other/wwmca_tool/Makefile src/tools/other/gsi_tools/Makefile src/tools/other/regrid_data_plane/Makefile src/tools/other/point2grid/Makefile src/tools/other/shift_data_plane/Makefile src/tools/other/mode_time_domain/Makefile src/tools/other/grid_diag/Makefile src/tools/tc_utils/Makefile src/tools/tc_utils/tc_dland/Makefile src/tools/tc_utils/tc_pairs/Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile src/tools/tc_utils/tc_rmw/Makefile" if test -n "$MET_DEVELOPMENT"; then @@ -9762,7 +9762,10 @@ do "scripts/Rscripts/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/Rscripts/Makefile" ;; "scripts/Rscripts/include/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/Rscripts/include/Makefile" ;; "scripts/python/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/Makefile" ;; - "scripts/utility/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/utility/Makefile" ;; + "scripts/python/examples/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/examples/Makefile" ;; + "scripts/python/met/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/met/Makefile" ;; + "scripts/python/pyembed/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/pyembed/Makefile" ;; + "scripts/python/utility/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/utility/Makefile" ;; "data/Makefile") CONFIG_FILES="$CONFIG_FILES data/Makefile" ;; "data/climo/Makefile") CONFIG_FILES="$CONFIG_FILES data/climo/Makefile" ;; "data/climo/seeps/Makefile") CONFIG_FILES="$CONFIG_FILES data/climo/seeps/Makefile" ;; @@ -9774,7 +9777,6 @@ do "data/poly/Makefile") CONFIG_FILES="$CONFIG_FILES data/poly/Makefile" ;; "data/poly/HMT_masks/Makefile") CONFIG_FILES="$CONFIG_FILES data/poly/HMT_masks/Makefile" ;; "data/poly/NCEP_masks/Makefile") CONFIG_FILES="$CONFIG_FILES data/poly/NCEP_masks/Makefile" ;; - "data/wrappers/Makefile") CONFIG_FILES="$CONFIG_FILES data/wrappers/Makefile" ;; "data/ps/Makefile") CONFIG_FILES="$CONFIG_FILES data/ps/Makefile" ;; "data/table_files/Makefile") CONFIG_FILES="$CONFIG_FILES data/table_files/Makefile" ;; "data/tc_data/Makefile") CONFIG_FILES="$CONFIG_FILES data/tc_data/Makefile" ;; diff --git a/configure.ac b/configure.ac index 60cc07bcd7..f1561991ee 100644 --- a/configure.ac +++ b/configure.ac @@ -1204,7 +1204,10 @@ AC_CONFIG_FILES([Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile - scripts/utility/Makefile + scripts/python/examples/Makefile + scripts/python/met/Makefile + scripts/python/pyembed/Makefile + scripts/python/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile @@ -1216,7 +1219,6 @@ AC_CONFIG_FILES([Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile - data/wrappers/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile diff --git 
a/scripts/python/Makefile.am b/scripts/python/Makefile.am index 689708e4c3..c3b7b20042 100644 --- a/scripts/python/Makefile.am +++ b/scripts/python/Makefile.am @@ -18,6 +18,11 @@ # SUBDIRS = include +SUBDIRS = \ + examples \ + met \ + pyembed \ + utility ## Example of how to Install outside of $(pkgdatadir) ## scriptsrootdir = $(prefix)/share/scripts @@ -25,17 +30,10 @@ pythonscriptsdir = $(pkgdatadir)/python -pythonscripts_DATA = \ - met_point_obs.py \ - met_point_obs_nc.py \ - read_ascii_numpy.py \ - read_ascii_numpy_grid.py \ - read_ascii_xarray.py \ - read_ascii_point.py \ - read_ascii_mpr.py \ - read_met_point_obs.py \ - derive_WRF_semilatlon.py - -EXTRA_DIST = ${pythonscripts_DATA} +#EXTRA_DIST = ${top_DATA} \ +# sample_fcst \ +# sample_obs \ +# python \ +# copyright_notice.txt MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/Makefile.in b/scripts/python/Makefile.in index 6d85ed81f9..5ff5daed23 100644 --- a/scripts/python/Makefile.in +++ b/scripts/python/Makefile.in @@ -15,7 +15,6 @@ @SET_MAKE@ # SUBDIRS = include - VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ @@ -114,43 +113,74 @@ am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = +RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ + ctags-recursive dvi-recursive html-recursive info-recursive \ + install-data-recursive install-dvi-recursive \ + install-exec-recursive install-html-recursive \ + install-info-recursive install-pdf-recursive \ + install-ps-recursive install-recursive installcheck-recursive \ + installdirs-recursive pdf-recursive ps-recursive \ + tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac -am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; -am__vpath_adj = case $$p in \ - $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ - *) f=$$p;; \ - esac; -am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; -am__install_max = 40 -am__nobase_strip_setup = \ - srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` -am__nobase_strip = \ - for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" -am__nobase_list = $(am__nobase_strip_setup); \ - for p in $$list; do echo "$$p $$p"; done | \ - sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ - $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ - if (++n[$$2] == $(am__install_max)) \ - { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ - END { for (dir in files) print dir, files[dir] }' -am__base_list = \ - sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ - sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' -am__uninstall_files_from_dir = { \ - test -z "$$files" \ - || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ - || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ - $(am__cd) "$$dir" && rm -f $$files; }; \ - } -am__installdirs = "$(DESTDIR)$(pythonscriptsdir)" -DATA = $(pythonscripts_DATA) +RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ + distclean-recursive maintainer-clean-recursive +am__recursive_targets = \ + $(RECURSIVE_TARGETS) \ + $(RECURSIVE_CLEAN_TARGETS) \ + $(am__extra_recursive_targets) +AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ + distdir distdir-am am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. 
Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. +am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +DIST_SUBDIRS = $(SUBDIRS) am__DIST_COMMON = $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +am__relativize = \ + dir0=`pwd`; \ + sed_first='s,^\([^/]*\)/.*$$,\1,'; \ + sed_rest='s,^[^/]*/*,,'; \ + sed_last='s,^.*/\([^/]*\)$$,\1,'; \ + sed_butlast='s,/*[^/]*$$,,'; \ + while test -n "$$dir1"; do \ + first=`echo "$$dir1" | sed -e "$$sed_first"`; \ + if test "$$first" != "."; then \ + if test "$$first" = ".."; then \ + dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ + dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ + else \ + first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ + if test "$$first2" = "$$first"; then \ + dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ + else \ + dir2="../$$dir2"; \ + fi; \ + dir0="$$dir0"/"$$first"; \ + fi; \ + fi; \ + dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ + done; \ + reldir="$$dir2" ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ @@ -296,21 +326,21 @@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ +SUBDIRS = \ + examples \ + met \ + pyembed \ + utility + pythonscriptsdir = $(pkgdatadir)/python -pythonscripts_DATA = \ - met_point_obs.py \ - met_point_obs_nc.py \ - read_ascii_numpy.py \ - read_ascii_numpy_grid.py \ - read_ascii_xarray.py \ - read_ascii_point.py \ - read_ascii_mpr.py \ - read_met_point_obs.py \ - derive_WRF_semilatlon.py - -EXTRA_DIST = ${pythonscripts_DATA} + +#EXTRA_DIST = ${top_DATA} \ +# sample_fcst \ +# sample_obs \ +# python \ +# copyright_notice.txt MAINTAINERCLEANFILES = Makefile.in -all: all-am +all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @@ -342,33 +372,105 @@ $(top_srcdir)/configure: $(am__configure_deps) $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): -install-pythonscriptsDATA: $(pythonscripts_DATA) - @$(NORMAL_INSTALL) - @list='$(pythonscripts_DATA)'; test -n "$(pythonscriptsdir)" || list=; \ - if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(pythonscriptsdir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(pythonscriptsdir)" || exit 1; \ - fi; \ - for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - echo "$$d$$p"; \ - done | $(am__base_list) | \ - while read files; do \ - echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonscriptsdir)'"; \ - $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonscriptsdir)" || exit $$?; \ - done - -uninstall-pythonscriptsDATA: - @$(NORMAL_UNINSTALL) - @list='$(pythonscripts_DATA)'; test -n "$(pythonscriptsdir)" || list=; \ - files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ - dir='$(DESTDIR)$(pythonscriptsdir)'; $(am__uninstall_files_from_dir) -tags TAGS: -ctags CTAGS: - -cscope cscopelist: +# This directory's subdirectories are mostly independent; you can cd +# into them and run 'make' without going 
through this Makefile. +# To change the values of 'make' variables: instead of editing Makefiles, +# (1) if the variable is set in 'config.status', edit 'config.status' +# (which will cause the Makefiles to be regenerated when you run 'make'); +# (2) otherwise, pass the desired values on the 'make' command line. +$(am__recursive_targets): + @fail=; \ + if $(am__make_keepgoing); then \ + failcom='fail=yes'; \ + else \ + failcom='exit 1'; \ + fi; \ + dot_seen=no; \ + target=`echo $@ | sed s/-recursive//`; \ + case "$@" in \ + distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ + *) list='$(SUBDIRS)' ;; \ + esac; \ + for subdir in $$list; do \ + echo "Making $$target in $$subdir"; \ + if test "$$subdir" = "."; then \ + dot_seen=yes; \ + local_target="$$target-am"; \ + else \ + local_target="$$target"; \ + fi; \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + || eval $$failcom; \ + done; \ + if test "$$dot_seen" = "no"; then \ + $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ + fi; test -z "$$fail" + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-recursive +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ + include_option=--etags-include; \ + empty_fix=.; \ + else \ + include_option=--include; \ + empty_fix=; \ + fi; \ + list='$(SUBDIRS)'; for subdir in $$list; do \ + if test "$$subdir" = .; then :; else \ + test ! -f $$subdir/TAGS || \ + set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ + fi; \ + done; \ + $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-recursive + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-recursive + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) distdir-am @@ -403,22 +505,45 @@ distdir-am: $(DISTFILES) || exit 1; \ fi; \ done -check-am: all-am -check: check-am -all-am: Makefile $(DATA) -installdirs: - for dir in "$(DESTDIR)$(pythonscriptsdir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ + if test "$$subdir" = .; then :; else \ + $(am__make_dryrun) \ + || test -d "$(distdir)/$$subdir" \ + || $(MKDIR_P) "$(distdir)/$$subdir" \ + || exit 1; \ + dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ + $(am__relativize); \ + new_distdir=$$reldir; \ + dir1=$$subdir; dir2="$(top_distdir)"; \ + $(am__relativize); \ + new_top_distdir=$$reldir; \ + echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" 
distdir="$$new_distdir" \\"; \ + echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ + ($(am__cd) $$subdir && \ + $(MAKE) $(AM_MAKEFLAGS) \ + top_distdir="$$new_top_distdir" \ + distdir="$$new_distdir" \ + am__remove_distdir=: \ + am__skip_length_check=: \ + am__skip_mode_fix=: \ + distdir) \ + || exit 1; \ + fi; \ done -install: install-am -install-exec: install-exec-am -install-data: install-data-am -uninstall: uninstall-am +check-am: all-am +check: check-recursive +all-am: Makefile +installdirs: installdirs-recursive +installdirs-am: +install: install-recursive +install-exec: install-exec-recursive +install-data: install-data-recursive +uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am -installcheck: installcheck-am +installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ @@ -441,85 +566,85 @@ maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) -clean: clean-am +clean: clean-recursive clean-am: clean-generic mostlyclean-am -distclean: distclean-am +distclean: distclean-recursive -rm -f Makefile -distclean-am: clean-am distclean-generic +distclean-am: clean-am distclean-generic distclean-tags -dvi: dvi-am +dvi: dvi-recursive dvi-am: -html: html-am +html: html-recursive html-am: -info: info-am +info: info-recursive info-am: -install-data-am: install-pythonscriptsDATA +install-data-am: -install-dvi: install-dvi-am +install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: -install-html: install-html-am +install-html: install-html-recursive install-html-am: -install-info: install-info-am +install-info: install-info-recursive install-info-am: install-man: -install-pdf: install-pdf-am +install-pdf: install-pdf-recursive install-pdf-am: -install-ps: install-ps-am +install-ps: install-ps-recursive install-ps-am: installcheck-am: -maintainer-clean: maintainer-clean-am +maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic -mostlyclean: mostlyclean-am +mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic -pdf: pdf-am +pdf: pdf-recursive pdf-am: -ps: ps-am +ps: ps-recursive ps-am: -uninstall-am: uninstall-pythonscriptsDATA +uninstall-am: -.MAKE: install-am install-strip +.MAKE: $(am__recursive_targets) install-am install-strip -.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ - ctags-am distclean distclean-generic distdir dvi dvi-am html \ - html-am info info-am install install-am install-data \ +.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ + check-am clean clean-generic cscopelist-am ctags ctags-am \ + distclean distclean-generic distclean-tags distdir dvi dvi-am \ + html html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ - install-ps install-ps-am install-pythonscriptsDATA \ - install-strip installcheck installcheck-am installdirs \ - maintainer-clean maintainer-clean-generic mostlyclean \ - mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ - uninstall-am uninstall-pythonscriptsDATA + install-ps install-ps-am install-strip 
installcheck \ + installcheck-am installdirs installdirs-am maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-generic pdf \ + pdf-am ps ps-am tags tags-am uninstall uninstall-am .PRECIOUS: Makefile diff --git a/scripts/python/examples/Makefile.am b/scripts/python/examples/Makefile.am index 33e77c59e9..e0461a3564 100644 --- a/scripts/python/examples/Makefile.am +++ b/scripts/python/examples/Makefile.am @@ -26,6 +26,12 @@ pythonexamplesdir = $(pkgdatadir)/python/examples pythonexamples_DATA = \ + derive_WRF_semilatlon.py \ + read_ascii_mpr.py \ + read_ascii_numpy_grid.py \ + read_ascii_numpy.py \ + read_ascii_point.py \ + read_ascii_xarray.py \ read_met_point_obs.py EXTRA_DIST = ${pythonexamples_DATA} diff --git a/scripts/python/examples/Makefile.in b/scripts/python/examples/Makefile.in index fe45d0ef29..ad4832e5a0 100644 --- a/scripts/python/examples/Makefile.in +++ b/scripts/python/examples/Makefile.in @@ -298,6 +298,12 @@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ pythonexamplesdir = $(pkgdatadir)/python/examples pythonexamples_DATA = \ + derive_WRF_semilatlon.py \ + read_ascii_mpr.py \ + read_ascii_numpy_grid.py \ + read_ascii_numpy.py \ + read_ascii_point.py \ + read_ascii_xarray.py \ read_met_point_obs.py EXTRA_DIST = ${pythonexamples_DATA} diff --git a/scripts/python/pyembed/Makefile.am b/scripts/python/pyembed/Makefile.am index deb919438e..9b69b8d376 100644 --- a/scripts/python/pyembed/Makefile.am +++ b/scripts/python/pyembed/Makefile.am @@ -18,10 +18,10 @@ SUBDIRS = -wrappersdir = $(pkgdatadir)/wrappers +pyembeddir = $(pkgdatadir)/python/pyembed -wrappers_DATA = \ - set_python_env.py \ +pyembed_DATA = \ + python_embedding.py \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ @@ -30,6 +30,6 @@ wrappers_DATA = \ write_tmp_point_nc.py \ write_tmp_mpr.py -EXTRA_DIST = ${wrappers_DATA} +EXTRA_DIST = ${pyembed_DATA} MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/pyembed/Makefile.in b/scripts/python/pyembed/Makefile.in index da04b2b2a0..a7f1932056 100644 --- a/scripts/python/pyembed/Makefile.in +++ b/scripts/python/pyembed/Makefile.in @@ -88,7 +88,7 @@ PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ -subdir = data/wrappers +subdir = scripts/python/pyembed ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ @@ -152,8 +152,8 @@ am__uninstall_files_from_dir = { \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } -am__installdirs = "$(DESTDIR)$(wrappersdir)" -DATA = $(wrappers_DATA) +am__installdirs = "$(DESTDIR)$(pyembeddir)" +DATA = $(pyembed_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ @@ -355,9 +355,9 @@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ SUBDIRS = -wrappersdir = $(pkgdatadir)/wrappers -wrappers_DATA = \ - set_python_env.py \ +pyembeddir = $(pkgdatadir)/python/pyembed +pyembed_DATA = \ + python_embedding.py \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ @@ -366,7 +366,7 @@ wrappers_DATA = \ write_tmp_point_nc.py \ write_tmp_mpr.py -EXTRA_DIST = ${wrappers_DATA} +EXTRA_DIST = ${pyembed_DATA} MAINTAINERCLEANFILES = Makefile.in all: all-recursive @@ -380,9 +380,9 @@ $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) exit 1;; \ esac; \ done; \ - echo ' 
cd $(top_srcdir) && $(AUTOMAKE) --foreign data/wrappers/Makefile'; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/pyembed/Makefile'; \ $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign data/wrappers/Makefile + $(AUTOMAKE) --foreign scripts/python/pyembed/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ @@ -400,27 +400,27 @@ $(top_srcdir)/configure: $(am__configure_deps) $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): -install-wrappersDATA: $(wrappers_DATA) +install-pyembedDATA: $(pyembed_DATA) @$(NORMAL_INSTALL) - @list='$(wrappers_DATA)'; test -n "$(wrappersdir)" || list=; \ + @list='$(pyembed_DATA)'; test -n "$(pyembeddir)" || list=; \ if test -n "$$list"; then \ - echo " $(MKDIR_P) '$(DESTDIR)$(wrappersdir)'"; \ - $(MKDIR_P) "$(DESTDIR)$(wrappersdir)" || exit 1; \ + echo " $(MKDIR_P) '$(DESTDIR)$(pyembeddir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pyembeddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ - echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(wrappersdir)'"; \ - $(INSTALL_DATA) $$files "$(DESTDIR)$(wrappersdir)" || exit $$?; \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pyembeddir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pyembeddir)" || exit $$?; \ done -uninstall-wrappersDATA: +uninstall-pyembedDATA: @$(NORMAL_UNINSTALL) - @list='$(wrappers_DATA)'; test -n "$(wrappersdir)" || list=; \ + @list='$(pyembed_DATA)'; test -n "$(pyembeddir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ - dir='$(DESTDIR)$(wrappersdir)'; $(am__uninstall_files_from_dir) + dir='$(DESTDIR)$(pyembeddir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. 
@@ -584,7 +584,7 @@ check: check-recursive all-am: Makefile $(DATA) installdirs: installdirs-recursive installdirs-am: - for dir in "$(DESTDIR)$(wrappersdir)"; do \ + for dir in "$(DESTDIR)$(pyembeddir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive @@ -638,7 +638,7 @@ info: info-recursive info-am: -install-data-am: install-wrappersDATA +install-data-am: install-pyembedDATA install-dvi: install-dvi-recursive @@ -682,7 +682,7 @@ ps: ps-recursive ps-am: -uninstall-am: uninstall-wrappersDATA +uninstall-am: uninstall-pyembedDATA .MAKE: $(am__recursive_targets) install-am install-strip @@ -693,11 +693,11 @@ uninstall-am: uninstall-wrappersDATA install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ - install-ps install-ps-am install-strip install-wrappersDATA \ + install-ps install-ps-am install-pyembedDATA install-strip \ installcheck installcheck-am installdirs installdirs-am \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic pdf pdf-am ps ps-am tags tags-am uninstall \ - uninstall-am uninstall-wrappersDATA + uninstall-am uninstall-pyembedDATA .PRECIOUS: Makefile diff --git a/scripts/python/utility/Makefile.am b/scripts/python/utility/Makefile.am index d807a69977..5efd02b01e 100644 --- a/scripts/python/utility/Makefile.am +++ b/scripts/python/utility/Makefile.am @@ -23,7 +23,7 @@ ## scriptsrootdir = $(prefix)/share/scripts ## pythonutilitydir = ${scriptsrootdir}/utility -pythonutilitydir = $(pkgdatadir)/utility +pythonutilitydir = $(pkgdatadir)/python/utility pythonutility_DATA = \ print_pointnc2ascii.py \ diff --git a/scripts/python/utility/Makefile.in b/scripts/python/utility/Makefile.in index bdaec7b3f9..7a994964d1 100644 --- a/scripts/python/utility/Makefile.in +++ b/scripts/python/utility/Makefile.in @@ -90,7 +90,7 @@ PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ -subdir = scripts/utility +subdir = scripts/python/utility ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ @@ -296,7 +296,7 @@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ -pythonutilitydir = $(pkgdatadir)/utility +pythonutilitydir = $(pkgdatadir)/python/utility pythonutility_DATA = \ print_pointnc2ascii.py \ build_ndbc_stations_from_web.py @@ -315,9 +315,9 @@ $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/utility/Makefile'; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/utility/Makefile'; \ $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign scripts/utility/Makefile + $(AUTOMAKE) --foreign scripts/python/utility/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \
	  *config.status*) \

From 57f2b461dfb72ead0ea16f825f9f2c0a0a2e7804 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Tue, 4 Apr 2023 11:11:08 -0600
Subject: [PATCH 15/81] #2285 Restructure python scripts: moved to
 scripts/python/pyembed

---
 data/Makefile.am | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/data/Makefile.am b/data/Makefile.am
index ac1483ba59..6cebff2c79 100644
--- a/data/Makefile.am
+++ b/data/Makefile.am
@@ -24,8 +24,7 @@ SUBDIRS = \
 	poly \
 	ps \
 	table_files \
-	tc_data \
-	wrappers
+	tc_data
 
 topdir = $(pkgdatadir)

From 838929d74a585a9d1c88e193d7158a88a0351aea Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Tue, 4 Apr 2023 11:11:25 -0600
Subject: [PATCH 16/81] #2285 Restructure python scripts: moved to
 scripts/python/pyembed

---
 data/Makefile.in | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/data/Makefile.in b/data/Makefile.in
index acf549d2de..2bf69df44f 100644
--- a/data/Makefile.in
+++ b/data/Makefile.in
@@ -362,8 +362,7 @@ SUBDIRS = \
 	poly \
 	ps \
 	table_files \
-	tc_data \
-	wrappers
+	tc_data
 
 topdir = $(pkgdatadir)
 top_DATA = \
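
The pyembed helper added in the next patch centralizes what the individual
wrappers used to do by hand: import the user's script as a module and pick up
one of the documented module-scope variables (point_data, met_point_data,
met_data, or mpr_data). For orientation, here is a minimal sketch of a user
script satisfying the point-data side of that contract; the filename and the
whitespace parsing are illustrative assumptions, not part of the patch:

# user_point_obs.py -- hypothetical user script run via MET python embedding.
# The wrappers import this module and read 'point_data' from module scope:
# a list of 11-column rows (typ, sid, vld, lat, lon, elv, var, lvl, hgt,
# qc, obs).
import sys

input_file = sys.argv[1] if len(sys.argv) > 1 else 'obs.txt'

point_data = []
with open(input_file) as f:
    for line in f:
        typ, sid, vld, lat, lon, elv, var, lvl, hgt, qc, obs = line.split()
        point_data.append([typ, sid, vld, float(lat), float(lon), float(elv),
                           var, float(lvl), float(hgt), qc, float(obs)])
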
From 4455c7dca67c6f676b89fc6aee07368ec54e02a4 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 7 Apr 2023 14:05:51 -0600
Subject: [PATCH 17/81] #2285 Initial release

---
 scripts/python/pyembed/python_embedding.py | 113 +++++++++++++++++++++
 1 file changed, 113 insertions(+)
 create mode 100644 scripts/python/pyembed/python_embedding.py

diff --git a/scripts/python/pyembed/python_embedding.py b/scripts/python/pyembed/python_embedding.py
new file mode 100644
index 0000000000..f627b6ccd9
--- /dev/null
+++ b/scripts/python/pyembed/python_embedding.py
@@ -0,0 +1,113 @@
+
+########################################################################
+#
+# Common APIs for python wrappers by Howard Soh (from scripts by
+# George McCabe and Randy Bullock).
+#
+# This is called when a user specifies a python executable (MET_PYTHON_EXE).
+# The target python object is saved to a temporary file by the user-defined
+# python, and the python binaries (compiled with MET) read the temporary
+# file and build the python object for MET.
+# The temporary file can be any form with matching write/read scripts.
+# - NetCDF for gridded data and point observation data.
+# - text file (ASCII data) (MPR, point observation).
+#
+# NOTE: sys.argv is changed by calling pyembed_tools.call_python
+#
+########################################################################
+
+import os
+import sys
+import importlib.util
+
+class pyembed_tools():
+
+   debug = False
+   class_name = "pyembed_tools"
+
+   @staticmethod
+   def add_python_path(called_file):   # called_file = __file__
+      method_name = f"{pyembed_tools.class_name}.add_python_path()"
+      script_dir = os.path.abspath(os.path.dirname(called_file))
+      if os.path.exists(script_dir) and script_dir != os.curdir:
+         if pyembed_tools.debug:
+            print(f"{method_name} added python path {script_dir}")
+         sys.path.append(os.path.abspath(script_dir))
+
+      # testing purpose (to switch the python path by using MET_BASE)
+      met_base_dir = os.environ.get('MET_BASE', None)
+      if met_base_dir is not None:
+         met_python_path = os.path.join(met_base_dir, 'python')
+         if pyembed_tools.debug:
+            print(f"{method_name} added python path {os.path.abspath(met_python_path)} from MET_BASE")
+         sys.path.append(os.path.abspath(met_python_path))
+
+      # add share/met/python directory to system path
+      met_python_path = os.path.join(script_dir, os.pardir, 'python')
+      if not os.path.exists(met_python_path):
+         met_python_path = os.path.join(script_dir, os.pardir, os.pardir, 'python')
+      if os.path.exists(met_python_path) and met_python_path != met_base_dir:
+         if pyembed_tools.debug:
+            print(f"{method_name} added python path {os.path.abspath(met_python_path)}")
+         sys.path.append(os.path.abspath(met_python_path))
+      else:
+         print("   - {d} does not exist".format(d=met_python_path))
+
+   @staticmethod
+   def call_python(argv):
+      print("Python Script:\t" + repr(argv[0]))
+      print("User Command:\t" + repr(' '.join(argv[2:])))
+      print("Temporary File:\t" + repr(argv[1]))
+
+      # argv[0] is the python wrapper script (caller)
+      # argv[1] contains the temporary filename
+      # argv[2] contains the user defined python script
+      pyembed_module_name = argv[2]
+      sys.argv = argv[2:]
+
+      # add share/met/python directory to system path to find met_point_obs
+      pyembed_tools.add_python_path(pyembed_module_name)
+
+      # append user script dir to system path
+      pyembed_dir, _ = os.path.split(pyembed_module_name)
+      if pyembed_dir:
+         sys.path.insert(0, pyembed_dir)
+
+      if not pyembed_module_name.endswith('.py'):
+         pyembed_module_name += '.py'
+
+      user_base = os.path.basename(pyembed_module_name).replace('.py','')
+
+      spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name)
+      met_in = importlib.util.module_from_spec(spec)
+      spec.loader.exec_module(met_in)
+      return met_in
+
+   @staticmethod
+   def read_tmp_ascii(filename):
+      """
+      Arguments:
+         filename (string): temporary file created by write_tmp_point.py or write_tmp_mpr.py
+
+      Returns:
+         (list of lists): point or mpr data
+      """
+      f = open(filename, 'r')
+      lines = f.readlines()
+      f.close()
+
+      ascii_data = [eval(line.strip('\n')) for line in lines]
+
+      return ascii_data
+
+   @staticmethod
+   def write_tmp_ascii(filename, met_data):
+      with open(filename, 'w') as f:
+         for line in met_data:
+            f.write(str(line) + '\n')
+
+
+if __name__ == '__main__':
+   argv_org = sys.argv[:]      # save original sys.argv
+   met_in = pyembed_tools.call_python(sys.argv)
+   sys.argv[:] = argv_org[:]   # restore

From b1324b235b978dde25b82d1cf06b2d10fd956f55 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 7 Apr 2023 14:12:34 -0600
Subject: [PATCH 18/81] #2285 Initial release

---
 .../vx_pointdata_python/mask_filters.cc      | 156 ++++++++++++++++++
 .../vx_pointdata_python/mask_filters.h       |  82 +++++++++
 2 
files changed, 238 insertions(+) create mode 100644 src/libcode/vx_pointdata_python/mask_filters.cc create mode 100644 src/libcode/vx_pointdata_python/mask_filters.h diff --git a/src/libcode/vx_pointdata_python/mask_filters.cc b/src/libcode/vx_pointdata_python/mask_filters.cc new file mode 100644 index 0000000000..bc8aa17d13 --- /dev/null +++ b/src/libcode/vx_pointdata_python/mask_filters.cc @@ -0,0 +1,156 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// + + +using namespace std; + +#include "mask_filters.h" + + +//////////////////////////////////////////////////////////////////////// + + + // + // Code for class MaskFilters + // + + +//////////////////////////////////////////////////////////////////////// + + +MaskFilters::MaskFilters(): + grid_mask(0), + area_mask(0), + poly_mask(0), + sid_mask(0), + typ_mask(0) +{ + clear(); +} + +//////////////////////////////////////////////////////////////////////// + +MaskFilters::MaskFilters(Grid *_grid_mask, MaskPlane *_area_mask, MaskPoly *_poly_mask, + StringArray *_sid_mask, StringArray *_typ_mask) { + clear(); + grid_mask = _grid_mask; + area_mask = _area_mask; + poly_mask = _poly_mask; + sid_mask = _sid_mask; + typ_mask = _typ_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::clear() { + grid_mask_cnt = 0; + area_mask_cnt = 0; + poly_mask_cnt = 0; + sid_mask_cnt = 0; + typ_mask_cnt = 0; +} + +//////////////////////////////////////////////////////////////////////// + +bool MaskFilters::is_filtered(double lat, double lon) { + bool masked = false; + // Apply the grid mask + if(grid_mask) { + double grid_x, grid_y; + grid_mask->latlon_to_xy(lat, -1.0*lon, grid_x, grid_y); + + if(grid_x < 0 || grid_x >= grid_mask->nx() || + grid_y < 0 || grid_y >= grid_mask->ny()) { + grid_mask_cnt++; + masked = true; + } + + // Apply the area mask + if(area_mask && !masked) { + if(!area_mask->s_is_on(nint(grid_x), nint(grid_y))) { + area_mask_cnt++; + masked = true; + } + } + } + + // Apply the polyline mask + if(poly_mask && !masked) { + if(!poly_mask->latlon_is_inside_dege(lat, lon)) { + poly_mask_cnt++; + masked = true; + } + } + + return masked; +} + +//////////////////////////////////////////////////////////////////////// + +bool MaskFilters::is_filtered_sid(const char *sid) { + bool masked = false; + + // Apply the station ID mask + if(sid_mask) { + if(!sid_mask->has(sid)) { + sid_mask_cnt++; + masked = true; + } + } + + return masked; +} + +//////////////////////////////////////////////////////////////////////// + +bool MaskFilters::is_filtered_typ(const char *msg_typ) { + bool masked = false; + + // Apply the message type mask + if(typ_mask) { + if(!typ_mask->has(msg_typ)) { + typ_mask_cnt++; + masked = true; + } + } + return masked; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_area_mask(MaskPlane *_area_mask) { + area_mask = _area_mask; +} + +//////////////////////////////////////////////////////////////////////// + +void MaskFilters::set_grid_mask(Grid *_grid_mask) { + grid_mask = _grid_mask; +} + 
+////////////////////////////////////////////////////////////////////////
+
+void MaskFilters::set_poly_mask(MaskPoly *_poly_mask) {
+   poly_mask = _poly_mask;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+void MaskFilters::set_sid_mask(StringArray *_sid_mask) {
+   sid_mask = _sid_mask;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+void MaskFilters::set_typ_mask(StringArray *_typ_mask) {
+   typ_mask = _typ_mask;
+}
+
+////////////////////////////////////////////////////////////////////////
diff --git a/src/libcode/vx_pointdata_python/mask_filters.h b/src/libcode/vx_pointdata_python/mask_filters.h
new file mode 100644
index 0000000000..39d7264138
--- /dev/null
+++ b/src/libcode/vx_pointdata_python/mask_filters.h
@@ -0,0 +1,82 @@
+// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
+// ** Copyright UCAR (c) 1992 - 2023
+// ** University Corporation for Atmospheric Research (UCAR)
+// ** National Center for Atmospheric Research (NCAR)
+// ** Research Applications Lab (RAL)
+// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA
+// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
+
+
+////////////////////////////////////////////////////////////////////////
+
+
+#ifndef  __MASK_FILTERS_H__
+#define  __MASK_FILTERS_H__
+
+
+////////////////////////////////////////////////////////////////////////
+
+
+#include "grid_base.h"
+
+
+////////////////////////////////////////////////////////////////////////
+
+class MaskFilters {
+
+protected:
+
+   int grid_mask_cnt;
+   int area_mask_cnt;
+   int poly_mask_cnt;
+   int typ_mask_cnt;
+   int sid_mask_cnt;
+
+   Grid        *grid_mask;
+   MaskPlane   *area_mask;
+   MaskPoly    *poly_mask;
+   StringArray *sid_mask;   // station IDs to be excluded
+   StringArray *typ_mask;   // message types to be excluded
+
+public:
+
+   MaskFilters();
+   MaskFilters(Grid *grid_mask, MaskPlane *area_mask,
+               MaskPoly *poly_mask, StringArray *sid_mask, StringArray *typ_mask);
+
+   void clear();
+
+   int get_area_mask_cnt();
+   int get_grid_mask_cnt();
+   int get_poly_mask_cnt();
+   int get_sid_mask_cnt();
+   int get_typ_mask_cnt();
+
+   bool is_filtered(double lat, double lon);
+   bool is_filtered_sid(const char *sid);
+   bool is_filtered_typ(const char *msg_typ);
+
+   void set_area_mask(MaskPlane *_area_mask);
+   void set_grid_mask(Grid *_grid_mask);
+   void set_poly_mask(MaskPoly *_poly_mask);
+   void set_sid_mask(StringArray *_sid_mask);
+   void set_typ_mask(StringArray *_typ_mask);
+
+};
+
+////////////////////////////////////////////////////////////////////////
+
+inline int MaskFilters::get_area_mask_cnt() { return area_mask_cnt; };
+inline int MaskFilters::get_grid_mask_cnt() { return grid_mask_cnt; };
+inline int MaskFilters::get_poly_mask_cnt() { return poly_mask_cnt; };
+inline int MaskFilters::get_sid_mask_cnt()  { return sid_mask_cnt; };
+inline int MaskFilters::get_typ_mask_cnt()  { return typ_mask_cnt; };
+
+
+////////////////////////////////////////////////////////////////////////
+
+
+#endif   /*  __MASK_FILTERS_H__  */
+
+
+////////////////////////////////////////////////////////////////////////

From 48c10b4a569f3e5c2abe29e0960c7863105e5267 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 7 Apr 2023 14:15:43 -0600
Subject: [PATCH 19/81] #2285 Restructure python scripts

---
 scripts/python/examples/read_ascii_mpr.py     |   8 +-
 scripts/python/examples/read_ascii_numpy.py   |   6 +-
 .../python/examples/read_ascii_numpy_grid.py  |   6 +-
 scripts/python/examples/read_ascii_point.py   |  33 ++-
 scripts/python/examples/read_ascii_xarray.py  |   6 
+- scripts/python/examples/read_met_point_obs.py | 66 ++---- scripts/python/met/dataplane.py | 223 ++++++++++++------ scripts/python/met/mprbase.py | 20 +- scripts/python/met/point.py | 122 ++++++---- scripts/python/pyembed/read_tmp_ascii.py | 21 +- scripts/python/pyembed/read_tmp_dataplane.py | 29 +-- scripts/python/pyembed/read_tmp_point_nc.py | 25 +- scripts/python/pyembed/write_tmp_dataplane.py | 68 +----- scripts/python/pyembed/write_tmp_mpr.py | 38 +-- scripts/python/pyembed/write_tmp_point.py | 39 +-- scripts/python/pyembed/write_tmp_point_nc.py | 62 ++--- 16 files changed, 347 insertions(+), 425 deletions(-) mode change 100755 => 100644 scripts/python/examples/read_met_point_obs.py diff --git a/scripts/python/examples/read_ascii_mpr.py b/scripts/python/examples/read_ascii_mpr.py index fa71b8e6d2..c8a68912fc 100644 --- a/scripts/python/examples/read_ascii_mpr.py +++ b/scripts/python/examples/read_ascii_mpr.py @@ -1,6 +1,7 @@ -import pandas as pd import os import sys +from met.mprbase import mpr_data + ######################################################################## @@ -21,10 +22,7 @@ print("Input File:\t" + repr(input_file)) # Read MPR lines, skipping the header row and first column. - mpr_data = pd.read_csv(input_file, header=None, - delim_whitespace=True, keep_default_na=False, - skiprows=1, usecols=range(1,37), - dtype=str).values.tolist() + mpr_data = mpr_data.read_mpr(input_file) print("Data Length:\t" + repr(len(mpr_data))) print("Data Type:\t" + repr(type(mpr_data))) except NameError: diff --git a/scripts/python/examples/read_ascii_numpy.py b/scripts/python/examples/read_ascii_numpy.py index eaccb64f2d..974b2ce158 100644 --- a/scripts/python/examples/read_ascii_numpy.py +++ b/scripts/python/examples/read_ascii_numpy.py @@ -1,6 +1,6 @@ import os import sys -from met.dataplane import load_txt, get_grid_metadata +from met.dataplane import dataplane ########################################### @@ -19,8 +19,8 @@ input_file = os.path.expandvars(sys.argv[1]) data_name = sys.argv[2] -met_data = load_txt(input_file, data_name) +met_data = dataplane.load_txt(input_file, data_name) -attrs = get_grid_metadata(data_name) +attrs = dataplane.get_grid_metadata(data_name) print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/examples/read_ascii_numpy_grid.py b/scripts/python/examples/read_ascii_numpy_grid.py index c3bb071616..e1f5cb1a34 100644 --- a/scripts/python/examples/read_ascii_numpy_grid.py +++ b/scripts/python/examples/read_ascii_numpy_grid.py @@ -1,6 +1,6 @@ import os import sys -from met.dataplane import load_txt, get_grid_metadata_from_env +from met.dataplane import dataplane ########################################### @@ -19,11 +19,11 @@ input_file = os.path.expandvars(sys.argv[1]) data_name = sys.argv[2] -met_data = load_txt(input_file, data_name) +met_data = dataplane.load_txt(input_file, data_name) ## create the metadata dictionary from the environment variable, ## Default env_name = 'PYTHON_GRID' -attrs = get_grid_metadata_from_env(data_name) +attrs = dataplane.get_grid_metadata_from_env(data_name) print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/examples/read_ascii_point.py b/scripts/python/examples/read_ascii_point.py index bc20a5beeb..f153e8bfd7 100644 --- a/scripts/python/examples/read_ascii_point.py +++ b/scripts/python/examples/read_ascii_point.py @@ -1,9 +1,7 @@ -import pandas as pd import os import sys -sys.path.append(os.path.abspath(os.path.dirname(__file__))) # for standalone -from met_point_obs import convert_point_data +from 
met.point import met_point_tools as tools ######################################################################## @@ -14,20 +12,22 @@ ## load the data into the numpy array ## -if len(sys.argv) < 2: +arg_cnt = len(sys.argv) +if arg_cnt < 2: print("ERROR: read_ascii_point.py -> Missing an input file.") sys.exit(1) -do_convert_to_met_point_data = False +do_convert = False last_index = 2 -if len(sys.argv) > last_index: - if sys.argv[2].lower() == "do_convert" or sys.argv[2].lower() == "convert": - do_convert_to_met_point_data = True +if arg_cnt > last_index: + opt_convert = sys.argv[2] + if opt_convert.lower() == "do_convert" or opt_convert.lower() == "convert": + do_convert = True last_index += 1 -if last_index < len(sys.argv): - print(" INFO: read_ascii_point.py -> Too many argument, ignored {a}.".format( - a=' '.join(sys.argv[last_index:]))) +if last_index < arg_cnt: + print(" INFO: read_ascii_point.py -> Too many argument, ignored {o}.".format( + o=' '.join(sys.argv[last_index:]))) # Read the input file as the first argument input_file = os.path.expandvars(sys.argv[1]) @@ -47,19 +47,14 @@ # (10) string: QC_String # (11) numeric: Observation_Value - point_data = pd.read_csv(input_file, header=None, delim_whitespace=True, keep_default_na=False, - names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'], - dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist() + point_data = tools.read_text_point_obs(input_file) print(" point_data: Data Length:\t" + repr(len(point_data))) print(" point_data: Data Type:\t" + repr(type(point_data))) - if do_convert_to_met_point_data: - met_point_data = convert_point_data(point_data) + if do_convert: + met_point_data = tools.convert_point_data(point_data) print(" met_point_data: Data Type:\t" + repr(type(met_point_data))) except FileNotFoundError: print(f"The input file {input_file} does not exist") sys.exit(1) -except Name`Error: - print(f"Can't find the input file {input_file}") - sys.exit(1) ######################################################################## diff --git a/scripts/python/examples/read_ascii_xarray.py b/scripts/python/examples/read_ascii_xarray.py index bc2fc6b589..91c8edb6ea 100644 --- a/scripts/python/examples/read_ascii_xarray.py +++ b/scripts/python/examples/read_ascii_xarray.py @@ -1,7 +1,7 @@ import os import sys import xarray as xr -from met.dataplane import load_txt, get_grid_metadata +from met.dataplane import dataplane ########################################### @@ -20,7 +20,7 @@ input_file = os.path.expandvars(sys.argv[1]) data_name = sys.argv[2] -met_data = load_txt(input_file, data_name) +met_data = dataplane.load_txt(input_file, data_name) ########################################### @@ -28,7 +28,7 @@ ## create the metadata dictionary ## -attrs = get_grid_metadata(data_name) +attrs = dataplane.get_grid_metadata(data_name) print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/examples/read_met_point_obs.py b/scripts/python/examples/read_met_point_obs.py old mode 100755 new mode 100644 index 57ccd22e7a..8fbb5ef85c --- a/scripts/python/examples/read_met_point_obs.py +++ b/scripts/python/examples/read_met_point_obs.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 ''' Created on Nov 10, 2021 @@ -12,9 +11,8 @@ python3 read_met_point_obs.py : 11 columns 'typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs' - string columns: 'typ', 'sid', 'vld', 'var', , 'qc' - numeric columns 'lat', 'lon', 'elv', 'lvl', 'hgt', 'qc', 'obs' - python3 
read_met_point_obs.py + string columns: 'typ', 'sid', 'vld', 'var', , 'qc' + numeric columns: 'lat', 'lon', 'elv', 'lvl', 'hgt', 'qc', 'obs' ''' @@ -22,68 +20,36 @@ import sys from datetime import datetime -met_base_dir = os.getenv('MET_BASE',None) -if met_base_dir is not None: - sys.path.append(os.path.join(met_base_dir, 'python')) - -from met_point_obs import met_point_obs, sample_met_point_obs -from met_point_obs_nc import nc_point_obs +from met.point import met_point_tools +from pyembed.python_embedding import pyembed_tools -DO_PRINT_DATA = False ARG_PRINT_DATA = 'show_data' +DO_PRINT_DATA = ARG_PRINT_DATA == sys.argv[-1] start_time = datetime.now() -prompt = met_point_obs.get_prompt() point_obs_data = None -if len(sys.argv) == 1 or ARG_PRINT_DATA == sys.argv[1]: - point_obs_data = sample_met_point_obs() +input_name = sys.argv[1] +prompt = met_point_tools.get_prompt() +if len(sys.argv) == 1 or ARG_PRINT_DATA == input_name: + point_obs_data = met_point_tools.get_sample_met_point_obs() point_obs_data.read_data([]) -elif met_point_obs.is_python_prefix(sys.argv[1]): - import importlib.util - - print("{p} Python Script:\t".format(p=prompt) + repr(sys.argv[0])) - print("{p} User Command:\t".format(p=prompt) + repr(' '.join(sys.argv[2:]))) - - pyembed_module_name = sys.argv[2] - sys.argv = sys.argv[1:] - - # append user script dir to system path - pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) - if pyembed_dir: - sys.path.insert(0, pyembed_dir) - - if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - os.environ[met_point_obs.MET_ENV_RUN] = "TRUE" - - user_base = os.path.basename(pyembed_module_name).replace('.py','') - - spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) - met_in = importlib.util.module_from_spec(spec) - spec.loader.exec_module(met_in) - - met_point_obs = met_in.met_point_obs - print("met_point_obs: ", met_point_obs) - met_point_data = met_in.met_point_data - print("met_point_data: ", met_point_data) - #print(hasattr("met_in: ", dir(met_in))) - #met_point_data = met_point_obs.get_point_data() - #met_point_data = None if met_in.get('met_point_data', None) else met_in.met_point_data - #met_data = None if met_in.get('met_data', None) else met_in.met_data - print(met_point_data) +elif met_point_tools.is_python_prefix(input_name): + point_obs_data = pyembed_tools.call_python(sys.argv) else: - netcdf_filename = sys.argv[1] + netcdf_filename = os.path.expandvars(input_name) args = [ netcdf_filename ] #args = { 'nc_name': netcdf_filename } - point_obs_data = nc_point_obs() + point_obs_data = met_point_tools.get_nc_point_obs() point_obs_data.read_data(point_obs_data.get_nc_filename(args)) if point_obs_data is not None: met_point_data = point_obs_data.get_point_data() met_point_data['met_point_data'] = point_obs_data + print("met_point_data: ", met_point_data) + print(met_point_data) - if DO_PRINT_DATA or ARG_PRINT_DATA == sys.argv[-1]: + if DO_PRINT_DATA: point_obs_data.dump() run_time = datetime.now() - start_time diff --git a/scripts/python/met/dataplane.py b/scripts/python/met/dataplane.py index b036a25c85..841c000fe1 100644 --- a/scripts/python/met/dataplane.py +++ b/scripts/python/met/dataplane.py @@ -1,80 +1,151 @@ -import numpy as np import os +import numpy as np +import netCDF4 as nc ########################################### -def load_txt(input_file, data_name) - try: - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data 
Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) - except NameError: - met_data = None - print("Can't find the input file") - return met_data - -## -## create the metadata dictionary -## - -def get_grid_metadata(data_name): - attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - - 'name': 'FooGrid', - - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - - 'lat_pin': 12.19, - 'lon_pin': -135.459, - - 'x_pin': 0.0, - 'y_pin': 0.0, - - 'lon_orient': -95.0, - - 'd_km': 40.635, - 'r_km': 6371.2, - - 'nx': 185, - 'ny': 129, - } - - } - return attrs - -## -## create the metadata dictionary from the environment variable PYTHON_GRID -## - -def get_grid_metadata_from_env(data_name, grid_env_name='PYTHON_GRID'): - attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - 'grid': os.path.expandvars(os.getenv(grid_env_name)) - } - return attrs +class dataplane(): + + ## + ## create the metadata dictionary + ## + + #@staticmethod + def get_grid_metadata(data_name): + attrs = { + + 'valid': '20050807_120000', + 'init': '20050807_000000', + 'lead': '120000', + 'accum': '120000', + + 'name': data_name, + 'long_name': data_name + '_word', + 'level': 'Surface', + 'units': 'None', + + 'grid': { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + + 'name': 'FooGrid', + + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + + 'lat_pin': 12.19, + 'lon_pin': -135.459, + + 'x_pin': 0.0, + 'y_pin': 0.0, + + 'lon_orient': -95.0, + + 'd_km': 40.635, + 'r_km': 6371.2, + + 'nx': 185, + 'ny': 129, + } + + } + return attrs + + ## + ## create the metadata dictionary from the environment variable PYTHON_GRID + ## + + #@staticmethod + def get_grid_metadata_from_env(data_name, grid_env_name='PYTHON_GRID'): + attrs = { + + 'valid': '20050807_120000', + 'init': '20050807_000000', + 'lead': '120000', + 'accum': '120000', + + 'name': data_name, + 'long_name': data_name + '_word', + 'level': 'Surface', + 'units': 'None', + 'grid': os.path.expandvars(os.getenv(grid_env_name)) + } + return attrs + + #@staticmethod + def load_txt(input_file, data_name): + try: + print("Input File:\t" + repr(input_file)) + print("Data Name:\t" + repr(data_name)) + met_data = np.loadtxt(input_file) + print("Data Shape:\t" + repr(met_data.shape)) + print("Data Type:\t" + repr(met_data.dtype)) + except NameError: + met_data = None + print("Can't find the input file") + return met_data + + #@staticmethod + def read_dataplane(netcdf_filename): + # read NetCDF file + ds = nc.Dataset(netcdf_filename, 'r') + met_data = ds['met_data'][:] + met_attrs = {} + + # grid is defined as a dictionary or string + grid = {} + for attr, attr_val in ds.__dict__.items(): + if 'grid.' 
in attr:
+            grid_attr = attr.split('.')[1]
+            grid[grid_attr] = attr_val
+         else:
+            met_attrs[attr] = attr_val
+
+      if grid:
+         met_attrs['grid'] = grid
+
+      met_attrs['name'] = met_attrs['name_str']
+      del met_attrs['name_str']
+
+      met_info = {}
+      met_info['met_data'] = met_data
+      met_info['attrs'] = met_attrs
+      return met_info
+
+   #@staticmethod
+   def write_dataplane(met_in, netcdf_filename):
+      met_info = {'met_data': met_in.met_data}
+      if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs:
+         attrs = met_in.met_data.attrs
+      else:
+         attrs = met_in.attrs
+      met_info['attrs'] = attrs
+
+      # determine fill value
+      try:
+         fill = met_in.met_data.get_fill_value()
+      except:
+         fill = -9999.
+
+      # write NetCDF file
+      ds = nc.Dataset(netcdf_filename, 'w')
+
+      # create dimensions and variable
+      nx, ny = met_in.met_data.shape
+      ds.createDimension('x', nx)
+      ds.createDimension('y', ny)
+      dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill)
+      dp[:] = met_in.met_data
+
+      # append attributes
+      for attr, attr_val in met_info['attrs'].items():
+         if attr == 'name':
+            setattr(ds, 'name_str', attr_val)
+         elif type(attr_val) == dict:
+            for key in attr_val:
+               setattr(ds, attr + '.' + key, attr_val[key])
+         else:
+            setattr(ds, attr, attr_val)
+
+      ds.close()
+
diff --git a/scripts/python/met/mprbase.py b/scripts/python/met/mprbase.py
index 9ddc20c294..c50ab89a29 100644
--- a/scripts/python/met/mprbase.py
+++ b/scripts/python/met/mprbase.py
@@ -2,15 +2,17 @@
 
 ########################################################################
 
-def read_mpr(input_file, usecols=range(1,37), header=None,
-             delim_whitespace=True, keep_default_na=False,
-             skiprows=1, dtype=str):
-   mpr_data = pd.read_csv(input_file, header=header,
-                          delim_whitespace=delim_whitespace,
-                          keep_default_na=keep_default_na,
-                          skiprows=skiprows, usecols=usecols,
-                          dtype=dtype).values.tolist()
-   return mpr_data
+class mpr_data():
+
+   def read_mpr(input_file, usecols=range(1,37), header=None,
+                delim_whitespace=True, keep_default_na=False,
+                skiprows=1, dtype=str):
+      mpr_data = pd.read_csv(input_file, header=header,
+                             delim_whitespace=delim_whitespace,
+                             keep_default_na=keep_default_na,
+                             skiprows=skiprows, usecols=usecols,
+                             dtype=dtype).values.tolist()
+      return mpr_data
 
 ########################################################################
diff --git a/scripts/python/met/point.py b/scripts/python/met/point.py
index 7de2f2014f..77bfcadd1f 100644
--- a/scripts/python/met/point.py
+++ b/scripts/python/met/point.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 '''
 Created on Nov 10, 2021
 
@@ -9,57 +8,52 @@
    "def read_data(self, args)" which fills the array variables at __init__().
 - The args can be 1) single string argument, 2) the list of arguments,
   or 3) the dictionary of arguments.
-- A python objects, met_point_data, must set:
+- Either the "point_data" or the "met_point_data" python object (variable) must be set:
+  + "point_data" is from 11 column text input
+  + "met_point_data" is an array of headers and observation data.
+  "point_obs_data" is optional, to use a custom python EXE.
   It's a python instance which processes the point observation data
-  The customized script is expected to include following codes:
-    # prepare arguments for the customized script
-    args = {'input', sys.argv[1]}   # or args = []
-    point_obs_data = custom_reader()
-    point_obs_data.read_data(args)
-    met_point_data = point_obs_data.get_point_data()
+
+  Note: csv_point_obs is an example of met_point_data, not point_data
+
+
+  Example of "point_data": see met_point_tools.read_text_point_obs()
+
+def read_custom_data(data_filename):
+   # Implement here
+   return the array of 11 column data
+
+# prepare arguments for the customized script
+data_filename = sys.argv[1]
+point_data = read_custom_data(data_filename)
+
+
+
+  Example of "met_point_data": see csv_point_obs
+
+from met.point import met_point_obs
+
+class custom_reader(met_point_obs):
+
+   def read_data(data_filename):
+      # Implement here
+
+# prepare arguments for the customized script
+data_filename = sys.argv[1]
+point_obs_data = custom_reader()
+point_obs_data.read_data(data_filename)
+met_point_data = point_obs_data.get_point_data()
 '''
 
 import os
 from abc import ABC, abstractmethod
+
 import numpy as np
+import netCDF4 as nc
 import pandas as pd
 
 COUNT_SHOW = 30
 
-def get_prompt():
-   return "   python:"
-
-def met_is_python_prefix(user_cmd):
-   return user_cmd.startswith(base_met_point_obs.python_prefix)
-
-########################################################################
-
-# Read the input file as the first argument
-def read_ascii_point_obs(input_file, header=None,
-                         delim_whitespace=True, keep_default_na=False):
-   # Read and format the input 11-column observations:
-   #   (1)  string:  Message_Type
-   #   (2)  string:  Station_ID
-   #   (3)  string:  Valid_Time(YYYYMMDD_HHMMSS)
-   #   (4)  numeric: Lat(Deg North)
-   #   (5)  numeric: Lon(Deg East)
-   #   (6)  numeric: Elevation(msl)
-   #   (7)  string:  Var_Name(or GRIB_Code)
-   #   (8)  numeric: Level
-   #   (9)  numeric: Height(msl or agl)
-   #   (10) string:  QC_String
-   #   (11) numeric: Observation_Value
-   ascii_point_data = pd.read_csv(input_file, header=header,
-                                  delim_whitespace=delim_whitespace,
-                                  keep_default_na=keep_default_na,
-                                  names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'],
-                                  dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist()
-   return ascii_point_data
-
-
 class base_met_point_obs(object):
    '''
    classdocs
   '''
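
The docstring above names the two module-scope contracts; to make the
11-column "point_data" form concrete, here is a small sketch with invented
values (the helper call mirrors read_ascii_point.py from this patch):

# Hypothetical values; the column layout (typ, sid, vld, lat, lon, elv,
# var, lvl, hgt, qc, obs) is the documented 11-column point format.
from met.point import met_point_tools

point_data = [
    ['ADPSFC', 'KDEN', '20230409_120000', 39.85, -104.66, 1650.0,
     'TMP', 2.0, 2.0, 'NA', 285.2],
]
# convert_point_data() repackages the rows into the header/observation
# arrays that MET expects as 'met_point_data'.
met_point_data = met_point_tools.convert_point_data(point_data)
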
@@ -304,13 +298,17 @@ def put_data(self, point_obs_dict):
       if po_array is not None:
          self.hdr_inst_typ = po_array
 
+   @staticmethod
+   def get_prompt():
+      return "   python:"
+
    @staticmethod
    def error_msg(msg):
-      print(f'{get_prompt()} {base_met_point_obs.ERROR_P} {msg}')
+      print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.ERROR_P} {msg}')
 
    @staticmethod
    def info_msg(msg):
-      print(f'{get_prompt()} {base_met_point_obs.INFO_P} {msg}')
+      print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.INFO_P} {msg}')
 
    @staticmethod
    def get_python_script(arg_value):
@@ -385,7 +383,7 @@ def print_point_data(met_point_data, print_subset=True):
    print('   === MET point data by python embedding ===')
 
 
-class csv_point_obs(ABC, base_met_point_obs):
+class csv_point_obs(base_met_point_obs):
 
    def __init__(self, point_data):
       self.point_data = point_data
@@ -626,14 +624,52 @@ def read_data(self, args):
       '''
       pass
 
+
+class met_point_tools():
+
+   @staticmethod
+   def convert_point_data(point_data, check_all_records=False, input_type='csv'):
+      return convert_point_data(point_data, check_all_records, input_type)
+
+   @staticmethod
+   def get_prompt():
+      return 
python:" + + @staticmethod + def get_nc_point_obs(): + return nc_point_obs() + + @staticmethod + def get_sample_point_obs(): + return sample_met_point_obs() @staticmethod def is_python_prefix(user_cmd): return user_cmd.startswith(base_met_point_obs.python_prefix) + @staticmethod + # Read the input file which is 11 column text file as the first argument + def read_text_point_obs(input_file, header=None, + delim_whitespace=True, keep_default_na=False): + # Read and format the input 11-column observations: + # (1) string: Message_Type + # (2) string: Station_ID + # (3) string: Valid_Time(YYYYMMDD_HHMMSS) + # (4) numeric: Lat(Deg North) + # (5) numeric: Lon(Deg East) + # (6) numeric: Elevation(msl) + # (7) string: Var_Name(or GRIB_Code) + # (8) numeric: Level + # (9) numeric: Height(msl or agl) + # (10) string: QC_String + # (11) numeric: Observation_Value + ascii_point_data = pd.read_csv(input_file, header=header, + delim_whitespace=delim_whitespace, + keep_default_na=keep_default_na, + names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'], + dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist() + return ascii_point_data + # Note: caller should import netCDF4 # the argements nc_group(dataset) and nc_var should not be None @@ -929,7 +965,7 @@ def main_nc(argv): point_obs_data = nc_point_obs() point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename)) met_point_data = point_obs_data.save_ncfile(tmp_nc_name) - print(f'{get_prompt()} saved met_point_data to {tmp_nc_name}') + print(f'{base_met_point_obs.get_prompt()} saved met_point_data to {tmp_nc_name}') met_point_data['met_point_data'] = point_obs_data if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]: diff --git a/scripts/python/pyembed/read_tmp_ascii.py b/scripts/python/pyembed/read_tmp_ascii.py index fb7eb7b4e7..b508f6cbef 100644 --- a/scripts/python/pyembed/read_tmp_ascii.py +++ b/scripts/python/pyembed/read_tmp_ascii.py @@ -20,21 +20,14 @@ import argparse -def read_tmp_ascii(filename): - """ - Arguments: - filename (string): temporary file created by write_tmp_point.py or write_tmp_mpr.py +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools - Returns: - (list of lists): point or mpr data - """ - f = open(filename, 'r') - lines = f.readlines() - f.close() - - global ascii_data - ascii_data = [eval(line.strip('\n')) for line in lines] - +def read_tmp_ascii(filename): + global ascii_data # defined at python_handler.cc (tmp_list_name) + ascii_data = pyembed_tools.read_tmp_ascii(filename) return ascii_data if __name__ == '__main__': diff --git a/scripts/python/pyembed/read_tmp_dataplane.py b/scripts/python/pyembed/read_tmp_dataplane.py index 98bbe728d8..aa2bc6046a 100644 --- a/scripts/python/pyembed/read_tmp_dataplane.py +++ b/scripts/python/pyembed/read_tmp_dataplane.py @@ -7,31 +7,10 @@ ######################################################################## import sys -import numpy as np -import netCDF4 as nc -met_info = {} -netcdf_filename = sys.argv[1] +# PYTHON path for met.dataplane is added by write_tmp_dataplane.py +from met.dataplane import dataplane +netcdf_filename = sys.argv[1] # read NetCDF file -ds = nc.Dataset(netcdf_filename, 'r') -met_data = ds['met_data'][:] -met_attrs = {} - -# grid is defined as a dictionary or string -grid = {} -for attr, attr_val in ds.__dict__.items(): - if 'grid.' 
in attr: - grid_attr = attr.split('.')[1] - grid[grid_attr] = attr_val - else: - met_attrs[attr] = attr_val - -if grid: - met_attrs['grid'] = grid - -met_attrs['name'] = met_attrs['name_str'] -del met_attrs['name_str'] -met_info['met_data'] = met_data -met_info['attrs'] = met_attrs - +met_info = dataplane.read_dataplane(netcdf_filename) diff --git a/scripts/python/pyembed/read_tmp_point_nc.py b/scripts/python/pyembed/read_tmp_point_nc.py index 774738a7c7..d9df6dfdec 100644 --- a/scripts/python/pyembed/read_tmp_point_nc.py +++ b/scripts/python/pyembed/read_tmp_point_nc.py @@ -6,26 +6,25 @@ # ######################################################################## -import os import sys -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) -from met_point_obs import met_point_obs -from met_point_obs_nc import nc_point_obs +from met.point import met_point_tools +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools -netcdf_filename = sys.argv[1] +#pyembed_tools.add_python_path(__file__) + +input_filename = sys.argv[1] # read NetCDF file -print('{p} reading {f}'.format(p=met_point_obs.get_prompt(), f=netcdf_filename)) +print('{p} reading {f}'.format(p=met_point_tools.get_prompt(), f=input_filename)) try: - point_obs_data = nc_point_obs() - point_obs_data.read_data(netcdf_filename) + point_obs_data = met_point_tools.get_nc_point_obs() + point_obs_data.read_data(input_filename) met_point_data = point_obs_data.get_point_data() met_point_data['met_point_data'] = point_obs_data except: - from read_tmp_ascii import read_tmp_ascii - - point_data = read_tmp_ascii(netcdf_filename) + point_data = pyembed_tools.read_tmp_ascii(input_filename) diff --git a/scripts/python/pyembed/write_tmp_dataplane.py b/scripts/python/pyembed/write_tmp_dataplane.py index 476d2348c3..150f69979b 100644 --- a/scripts/python/pyembed/write_tmp_dataplane.py +++ b/scripts/python/pyembed/write_tmp_dataplane.py @@ -8,68 +8,20 @@ # ######################################################################## -import os import sys -import importlib.util -import netCDF4 as nc -print("Python Script:\t" + repr(sys.argv[0])) -print("User Command:\t" + repr(' '.join(sys.argv[2:]))) -print("Temporary File:\t" + repr(sys.argv[1])) - -netcdf_filename = sys.argv[1] -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] - -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -met_info = {'met_data': met_in.met_data} -if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: - attrs = met_in.met_data.attrs -else: - attrs = met_in.attrs -met_info['attrs'] = attrs - -# determine fill value try: - fill = met_data.get_fill_value() + from python_embedding import pyembed_tools except: - fill = -9999. 
- -# write NetCDF file -ds = nc.Dataset(netcdf_filename, 'w') + from pyembed.python_embedding import pyembed_tools -# create dimensions and variable -nx, ny = met_in.met_data.shape -ds.createDimension('x', nx) -ds.createDimension('y', ny) -dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill) -dp[:] = met_in.met_data +pyembed_tools.add_python_path(__file__) +from met.dataplane import dataplane -# append attributes -for attr, attr_val in met_info['attrs'].items(): - if attr == 'name': - setattr(ds, 'name_str', attr_val) - elif type(attr_val) == dict: - for key in attr_val: - setattr(ds, attr + '.' + key, attr_val[key]) - else: - setattr(ds, attr, attr_val) +#def write_dataplane(met_in, netcdf_filename): +# dataplane.write_dataplane(met_in, netcdf_filename) -ds.close() +if __name__ == '__main__': + netcdf_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + dataplane.write_dataplane(met_in, netcdf_filename) diff --git a/scripts/python/pyembed/write_tmp_mpr.py b/scripts/python/pyembed/write_tmp_mpr.py index 3eee0379f5..da4f65fb03 100644 --- a/scripts/python/pyembed/write_tmp_mpr.py +++ b/scripts/python/pyembed/write_tmp_mpr.py @@ -8,36 +8,16 @@ # ######################################################################## -import os import sys -import importlib.util -print("Python Script:\t" + repr(sys.argv[0])) -print("User Command:\t" + repr(' '.join(sys.argv[2:]))) -print("Temporary File:\t" + repr(sys.argv[1])) +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools -tmp_filename = sys.argv[1] -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] +if __name__ == '__main__': + argv_org = sys.argv[:] + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -f = open(tmp_filename, 'w') -for line in met_in.mpr_data: - f.write(str(line) + '\n') + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.mpr_data) diff --git a/scripts/python/pyembed/write_tmp_point.py b/scripts/python/pyembed/write_tmp_point.py index a289a4c331..edcbdbd877 100644 --- a/scripts/python/pyembed/write_tmp_point.py +++ b/scripts/python/pyembed/write_tmp_point.py @@ -8,41 +8,14 @@ # ######################################################################## -import os import sys -import importlib.util - -def write_tmp_ascii(filename, point_data): - with open(filename, 'w') as f: - for line in point_data: - f.write(str(line) + '\n') +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools if __name__ == '__main__': - print("Python Script:\t" + repr(sys.argv[0])) - print("User Command:\t" + repr(' '.join(sys.argv[2:]))) - print("Temporary File:\t" + repr(sys.argv[1])) - tmp_filename = sys.argv[1] - pyembed_module_name = sys.argv[2] - sys.argv = sys.argv[2:] - - # add share/met/python directory to system path to find 
met_point_obs - sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) - - # append user script dir to system path - pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) - if pyembed_dir: - sys.path.insert(0, pyembed_dir) - - if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - - user_base = os.path.basename(pyembed_module_name).replace('.py','') - - spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) - met_in = importlib.util.module_from_spec(spec) - spec.loader.exec_module(met_in) - - write_tmp_ascii(tmp_filename, met_in.point_data) + met_in = pyembed_tools.call_python(sys.argv) + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data) diff --git a/scripts/python/pyembed/write_tmp_point_nc.py b/scripts/python/pyembed/write_tmp_point_nc.py index 35ca014fd7..c12683453e 100644 --- a/scripts/python/pyembed/write_tmp_point_nc.py +++ b/scripts/python/pyembed/write_tmp_point_nc.py @@ -10,50 +10,28 @@ import os import sys -import importlib.util -# add share/met/python directory to system path to find met_point_obs -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, 'python'))) +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools -from met_point_obs import met_point_obs -from met_point_obs_nc import nc_point_obs - -PROMPT = met_point_obs.get_prompt() -print("{p} Python Script:\t".format(p=PROMPT) + repr(sys.argv[0])) -print("{p} User Command:\t".format(p=PROMPT) + repr(' '.join(sys.argv[2:]))) -print("{p} Temporary File:\t".format(p=PROMPT) + repr(sys.argv[1])) +pyembed_tools.add_python_path(__file__) +from met.point import met_point_tools -tmp_filename = sys.argv[1] -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] +if __name__ == '__main__': + argv_org = sys.argv[:] + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - sys.path.insert(0, pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -if hasattr(met_in, 'point_data'): - from write_tmp_point import write_tmp_ascii - - write_tmp_ascii(tmp_filename, met_in.point_data) -elif hasattr(met_in, 'point_obs_data'): - met_in.point_obs_data.save_ncfile(tmp_filename) -else: - if hasattr(met_in.met_point_data, 'point_obs_data'): - met_in.met_point_data['point_obs_data'].save_ncfile(tmp_filename) + if hasattr(met_in, 'point_data'): + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data) + elif hasattr(met_in, 'point_obs_data'): + met_in.point_obs_data.save_ncfile(tmp_filename) else: - tmp_point_obs = nc_point_obs() - tmp_point_obs.put_data(met_in.met_point_data) - tmp_point_obs.save_ncfile(tmp_filename) - -#print('{p} writing {f}'.format(p=PROMPT, f=tmp_filename)) + if hasattr(met_in.met_point_data, 'point_obs_data'): + met_in.met_point_data['point_obs_data'].save_ncfile(tmp_filename) + else: + tmp_point_obs = met_point_tools.get_nc_point_obs() + tmp_point_obs.put_data(met_in.met_point_data) + tmp_point_obs.save_ncfile(tmp_filename) From a14085dd86406d9b2b7cc38a401a3886faf29d65 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: 
Fri, 7 Apr 2023 14:16:40 -0600 Subject: [PATCH 20/81] #2285 Restructure python scripts --- scripts/python/met/Makefile.am | 2 +- scripts/python/met/Makefile.in | 2 +- scripts/python/pyembed/Makefile.am | 1 + scripts/python/pyembed/Makefile.in | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/python/met/Makefile.am b/scripts/python/met/Makefile.am index 4d02474315..9e430722af 100644 --- a/scripts/python/met/Makefile.am +++ b/scripts/python/met/Makefile.am @@ -25,8 +25,8 @@ pythonmetscriptsdir = $(pkgdatadir)/python/met pythonmetscripts_DATA = \ - mprbase.py \ dataplane.py \ + mprbase.py \ point.py EXTRA_DIST = ${pythonmetscripts_DATA} diff --git a/scripts/python/met/Makefile.in b/scripts/python/met/Makefile.in index f0fa53e57b..488e85355e 100644 --- a/scripts/python/met/Makefile.in +++ b/scripts/python/met/Makefile.in @@ -298,8 +298,8 @@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ pythonmetscriptsdir = $(pkgdatadir)/python/met pythonmetscripts_DATA = \ - mprbase.py \ dataplane.py \ + mprbase.py \ point.py EXTRA_DIST = ${pythonmetscripts_DATA} diff --git a/scripts/python/pyembed/Makefile.am b/scripts/python/pyembed/Makefile.am index 9b69b8d376..ca8a3cb66e 100644 --- a/scripts/python/pyembed/Makefile.am +++ b/scripts/python/pyembed/Makefile.am @@ -25,6 +25,7 @@ pyembed_DATA = \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ + set_python_env.py \ write_tmp_dataplane.py \ write_tmp_point.py \ write_tmp_point_nc.py \ diff --git a/scripts/python/pyembed/Makefile.in b/scripts/python/pyembed/Makefile.in index a7f1932056..bd0848e94e 100644 --- a/scripts/python/pyembed/Makefile.in +++ b/scripts/python/pyembed/Makefile.in @@ -361,6 +361,7 @@ pyembed_DATA = \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ + set_python_env.py \ write_tmp_dataplane.py \ write_tmp_point.py \ write_tmp_point_nc.py \ From 0c1aaad1c62695f7dcba0c4d10c16454b3695d42 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:19:48 -0600 Subject: [PATCH 21/81] #2285 Adjusted the path for python scripts --- src/basic/vx_util/python_line.cc | 50 +++++++++++++++++++++++++------- src/basic/vx_util/python_line.h | 3 ++ 2 files changed, 42 insertions(+), 11 deletions(-) diff --git a/src/basic/vx_util/python_line.cc b/src/basic/vx_util/python_line.cc index ebc4bfb4cc..623e1d1e25 100644 --- a/src/basic/vx_util/python_line.cc +++ b/src/basic/vx_util/python_line.cc @@ -32,9 +32,13 @@ using namespace std; //////////////////////////////////////////////////////////////////////// -static const char set_python_env_wrapper [] = "set_python_env"; +static const char env_PYTHONPATH [] = "PYTHONPATH"; -static const char write_tmp_mpr_wrapper [] = "MET_BASE/wrappers/write_tmp_mpr.py"; +static const char met_python_path [] = "MET_BASE/python"; + +static const char set_python_env_wrapper [] = "pyembed.set_python_env"; + +static const char write_tmp_mpr_wrapper [] = "MET_BASE/python/pyembed/write_tmp_mpr.py"; static const char list_name [] = "mpr_data"; @@ -301,9 +305,7 @@ void PyLineDataFile::do_straight() { -ConcatString command, path, user_base; - -path = set_python_env_wrapper; +ConcatString command, user_base; mlog << Debug(3) << "PyLineDataFile::do_straight() -> " @@ -318,7 +320,7 @@ user_base.chomp(".py"); // start up the python interpreter // -script = new Python3_Script (path.text()); +script = get_python3_script(); // // set up a "new" sys.argv list @@ -434,11 +436,7 @@ if ( status ) { } -ConcatString wrapper; - -wrapper = set_python_env_wrapper; - -script = 
new Python3_Script (wrapper.text()); +script = get_python3_script(); mlog << Debug(4) << "Reading temporary Python line data file: " << tmp_ascii_path << "\n"; @@ -574,6 +572,36 @@ return; } +//////////////////////////////////////////////////////////////////////// + +Python3_Script *get_python3_script() + +{ + +const char *method_name = "get_python3_script()"; +ConcatString path = set_python_env_wrapper; +ConcatString python_path = met_python_path; + +const char *env_pythonpath = getenv(env_PYTHONPATH); + +if (env_pythonpath) { + python_path = env_pythonpath; + python_path.add(':'); +} +python_path.add(replace_path(met_python_path)); +mlog << Debug(0) << method_name << " -> added python path (" + << replace_path(met_python_path) << ") to " << env_PYTHONPATH << "\n"; + +setenv(env_PYTHONPATH, python_path.c_str(),1); + + // + // start up the python interpreter + // + +return new Python3_Script (path.text()); + +} + //////////////////////////////////////////////////////////////////////// diff --git a/src/basic/vx_util/python_line.h b/src/basic/vx_util/python_line.h index 604066e126..9b39e074da 100644 --- a/src/basic/vx_util/python_line.h +++ b/src/basic/vx_util/python_line.h @@ -91,6 +91,9 @@ class PyLineDataFile : public LineDataFile { //////////////////////////////////////////////////////////////////////// +extern Python3_Script *get_python3_script(); + +//////////////////////////////////////////////////////////////////////// #endif /* __PYTHON_LINE_H__ */ From 6308f78db652af1a58c2d93505f9859e215d77a4 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:21:46 -0600 Subject: [PATCH 22/81] #2285 Adjusted the path for python scripts --- src/libcode/vx_data2d_python/python_dataplane.cc | 4 ++-- src/libcode/vx_python3_utils/python3_script.cc | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libcode/vx_data2d_python/python_dataplane.cc b/src/libcode/vx_data2d_python/python_dataplane.cc index fd8524b27f..00217bb7b2 100644 --- a/src/libcode/vx_data2d_python/python_dataplane.cc +++ b/src/libcode/vx_data2d_python/python_dataplane.cc @@ -31,9 +31,9 @@ extern GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_dataplane.py"; +static const char write_tmp_nc [] = "MET_BASE/python/pyembed/write_tmp_dataplane.py"; -static const char read_tmp_nc [] = "read_tmp_dataplane"; // NO ".py" suffix +static const char read_tmp_nc [] = "pyembed.read_tmp_dataplane"; // NO ".py" suffix static const char tmp_nc_var_name [] = "met_info"; diff --git a/src/libcode/vx_python3_utils/python3_script.cc b/src/libcode/vx_python3_utils/python3_script.cc index e34fb3ed7a..34fc038d71 100644 --- a/src/libcode/vx_python3_utils/python3_script.cc +++ b/src/libcode/vx_python3_utils/python3_script.cc @@ -26,7 +26,7 @@ using namespace std; static const char sq = '\''; // single quote -static const char read_tmp_ascii_py [] = "MET_BASE/wrappers/read_tmp_ascii.py"; +static const char read_tmp_ascii_py [] = "MET_BASE/python/pyembed/read_tmp_ascii.py"; //////////////////////////////////////////////////////////////////////// From 30e28bfbc4172cf2fe50eae03ba06c7e8530c8b4 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:22:24 -0600 Subject: [PATCH 23/81] #2285 Adjusted the path for python scripts --- src/tools/other/ascii2nc/python_handler.cc | 26 ++++++++-------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/src/tools/other/ascii2nc/python_handler.cc 
b/src/tools/other/ascii2nc/python_handler.cc index 9ea61d5a8a..4e4c0a820e 100644 --- a/src/tools/other/ascii2nc/python_handler.cc +++ b/src/tools/other/ascii2nc/python_handler.cc @@ -18,6 +18,7 @@ using namespace std; #include "vx_log.h" #include "vx_math.h" +#include "python_line.h" #include "vx_python3_utils.h" #include "python_handler.h" @@ -25,10 +26,7 @@ using namespace std; //////////////////////////////////////////////////////////////////////// - -static const char set_python_env_wrapper [] = "set_python_env"; - -static const char write_tmp_ascii_wrapper[] = "MET_BASE/wrappers/write_tmp_point.py"; +static const char write_tmp_ascii_wrapper[] = "MET_BASE/python/pyembed/write_tmp_point.py"; static const char list_name [] = "point_data"; @@ -247,9 +245,7 @@ bool PythonHandler::do_straight() { -ConcatString command, path, user_base; - -path = set_python_env_wrapper; +ConcatString command, user_base; mlog << Debug(3) << "Running user's python script (" @@ -263,7 +259,7 @@ user_base.chomp(".py"); // start up the python interpreter // -Python3_Script script(path.text()); +Python3_Script *script = get_python3_script(); // // set up a "new" sys.argv list @@ -271,7 +267,7 @@ Python3_Script script(path.text()); // the user's script // -script.reset_argv(user_script_filename.text(), user_script_args); +script->reset_argv(user_script_filename.text(), user_script_args); // // import the user's script as a module @@ -380,20 +376,16 @@ if ( status ) { } -ConcatString wrapper; - -wrapper = set_python_env_wrapper; - -Python3_Script script(wrapper.text()); +Python3_Script *script = get_python3_script(); mlog << Debug(4) << "Reading temporary Python ascii observation file: " << tmp_ascii_path << "\n"; -script.import_read_tmp_ascii_py(); +script->import_read_tmp_ascii_py(); -PyObject * dobj = script.read_tmp_ascii(tmp_ascii_path.text()); +PyObject * dobj = script->read_tmp_ascii(tmp_ascii_path.text()); -PyObject * obj = script.lookup_ascii(tmp_list_name); +PyObject * obj = script->lookup_ascii(tmp_list_name); if ( ! 
PyList_Check(obj) ) { From 521fb145745baae539ce7618441c662d957976ef Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:25:35 -0600 Subject: [PATCH 24/81] #2285 Removed xarray and added MaskFilters --- .../vx_pointdata_python/python_pointdata.cc | 382 ++++++++++++++---- .../vx_pointdata_python/python_pointdata.h | 8 +- 2 files changed, 304 insertions(+), 86 deletions(-) diff --git a/src/libcode/vx_pointdata_python/python_pointdata.cc b/src/libcode/vx_pointdata_python/python_pointdata.cc index d389596fea..35e20e61e2 100644 --- a/src/libcode/vx_pointdata_python/python_pointdata.cc +++ b/src/libcode/vx_pointdata_python/python_pointdata.cc @@ -8,7 +8,10 @@ //////////////////////////////////////////////////////////////////////// +#include +#include +#include "observation.h" #include "vx_python3_utils.h" #include "python_pointdata.h" #include "pointdata_from_array.h" @@ -27,22 +30,120 @@ extern GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_point_nc.py"; +static const char write_tmp_nc [] = "MET_BASE/python/pyembed/write_tmp_point_nc.py"; -static const char read_tmp_nc [] = "read_tmp_point_nc"; // NO ".py" suffix +static const char read_tmp_nc [] = "pyembed.read_tmp_point_nc"; // NO ".py" suffix //////////////////////////////////////////////////////////////////////// static bool tmp_nc_point_obs(const char * script_name, int user_script_argc, - char ** user_script_argv, MetPointDataPython &met_pd_out); + char ** user_script_argv, MetPointDataPython &met_pd_out, + MaskFilters *filters); static bool straight_python_point_data(const char * script_name, int script_argc, char ** script_argv, - const bool use_xarray, MetPointDataPython &met_pd_out); + MetPointDataPython &met_pd_out, + MaskFilters *filters); + +bool process_point_data(PyObject *module_obj, MetPointDataPython &met_pd_out); +bool process_point_data_list(PyObject *python_obj, MetPointDataPython &met_pd_out, + MaskFilters *filters); + +//////////////////////////////////////////////////////////////////////// + +void check_header_data(MetPointHeader *header_data, const char *caller) { + + if (header_data->typ_idx_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_typ is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->sid_idx_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_sid is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->vld_idx_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_vld is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->lat_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_lat is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->lon_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_lon is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->elv_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_elv is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + + if (header_data->typ_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_typ_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->sid_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_sid_table is empty. 
Please check if python input is processed properly\n\n"; + exit (1); + } + if (header_data->vld_array.n() == 0) { + mlog << Error << "\n" << caller + << "The hdr_vld_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } +} + +//////////////////////////////////////////////////////////////////////// + +void check_obs_data(MetPointObsData *obs_data, bool use_var_id, const char *caller) { + + if (obs_data->qty_names.n() == 0) { + mlog << Error << "\n" << caller + << "The obs_qty_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + if (use_var_id && obs_data->var_names.n() == 0) { + mlog << Error << "\n" << caller + << "The obs_var_table is empty. Please check if python input is processed properly\n\n"; + exit (1); + } + +} //////////////////////////////////////////////////////////////////////// + +PyObject *get_python_object(PyObject *module_obj, const char *python_var_name) +{ + + // + // get the namespace for the module (as a dictionary) + // + + PyObject *module_dict_obj = PyModule_GetDict (module_obj); + + // + // get handles to the objects of interest from the module_dict + // + + PyObject *python_met_point_data = PyDict_GetItemString (module_dict_obj, python_var_name); + + return python_met_point_data; +} + + +//////////////////////////////////////////////////////////////////////// + + static void set_str_array_from_python(PyObject *python_data, const char *python_key, StringArray *out) { const char *method_name = "set_met_array_from_python(StringArray *) -> "; PyObject *str_array_obj = PyDict_GetItemString (python_data, python_key); @@ -63,7 +164,7 @@ static void set_str_array_from_python(PyObject *python_data, const char *python_ bool python_point_data(const char * script_name, int script_argc, char ** script_argv, - const bool use_xarray, MetPointDataPython &met_pd_out) + MetPointDataPython &met_pd_out, MaskFilters *filters) { @@ -71,16 +172,15 @@ bool status = false; if ( user_ppath == 0 ) user_ppath = getenv(user_python_path_env); -if ( user_ppath != 0 ) { - // do_tmp_nc = true; +if ( user_ppath != 0 ) { // do_tmp_nc = true; status = tmp_nc_point_obs(script_name, script_argc, script_argv, - met_pd_out); + met_pd_out, filters); } else { status = straight_python_point_data(script_name, script_argc, script_argv, - use_xarray, met_pd_out); + met_pd_out, filters); } return ( status ); @@ -89,29 +189,21 @@ return ( status ); //////////////////////////////////////////////////////////////////////// -bool process_python_point_data(PyObject *module_obj, MetPointDataPython &met_pd_out) +bool process_point_data(PyObject *python_met_point_data, + MetPointDataPython &met_pd_out) + { int int_value; -PyObject *module_dict_obj = 0; PyObject *python_value = 0; -PyObject *python_met_point_data = 0; ConcatString cs, user_dir, user_base; -const char *method_name = "process_python_point_data -> "; -const char *method_name_s = "process_python_point_data()"; - - // - // get the namespace for the module (as a dictionary) - // - -module_dict_obj = PyModule_GetDict (module_obj); +const char *method_name = "process_point_data -> "; +const char *method_name_s = "process_point_data()"; // // get handles to the objects of interest from the module_dict // -python_met_point_data = PyDict_GetItemString (module_dict_obj, python_key_point_data); - python_value = PyDict_GetItemString (python_met_point_data, python_use_var_id); bool use_var_id = pyobject_as_bool(python_value); @@ -142,8 +234,7 @@ met_pd_out.allocate(int_value); MetPointObsData 
*obs_data = met_pd_out.get_point_obs_data(); MetPointHeader *header_data = met_pd_out.get_header_data(); - - // look up the data array variable name from the dictionary + // look up the data array variable name from the dictionary set_array_from_python(python_met_point_data, numpy_array_hdr_typ, &header_data->typ_idx_array); set_array_from_python(python_met_point_data, numpy_array_hdr_sid, &header_data->sid_idx_array); @@ -151,59 +242,17 @@ MetPointHeader *header_data = met_pd_out.get_header_data(); set_array_from_python(python_met_point_data, numpy_array_hdr_lat, &header_data->lat_array); set_array_from_python(python_met_point_data, numpy_array_hdr_lon, &header_data->lon_array); set_array_from_python(python_met_point_data, numpy_array_hdr_elv, &header_data->elv_array); - if (header_data->typ_idx_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_typ is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->sid_idx_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_sid is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->vld_idx_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_vld is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->lat_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_lat is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->lon_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_lon is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->elv_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_elv is empty. Please check if python input is processed properly\n\n"; - exit (1); - } set_str_array_from_python(python_met_point_data, numpy_array_hdr_typ_table, &header_data->typ_array); set_str_array_from_python(python_met_point_data, numpy_array_hdr_sid_table, &header_data->sid_array); set_str_array_from_python(python_met_point_data, numpy_array_hdr_vld_table, &header_data->vld_array); - if (header_data->typ_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_typ_table is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->sid_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_sid_table is empty. Please check if python input is processed properly\n\n"; - exit (1); - } - if (header_data->vld_array.n() == 0) { - mlog << Error << "\n" << method_name - << "The hdr_vld_table is empty. 
Please check if python input is processed properly\n\n"; - exit (1); - } + set_array_from_python(python_met_point_data, numpy_array_prpt_typ_table, &header_data->prpt_typ_array, false); set_array_from_python(python_met_point_data, numpy_array_irpt_typ_table, &header_data->irpt_typ_array, false); set_array_from_python(python_met_point_data, numpy_array_inst_typ_table, &header_data->inst_typ_array, false); + check_header_data(header_data, method_name); + set_array_from_python(python_met_point_data, numpy_array_obs_qty, obs_data->obs_qids); set_array_from_python(python_met_point_data, numpy_array_obs_hid, obs_data->obs_hids); set_array_from_python(python_met_point_data, numpy_array_obs_vid, obs_data->obs_ids); @@ -213,24 +262,173 @@ MetPointHeader *header_data = met_pd_out.get_header_data(); set_str_array_from_python(python_met_point_data, numpy_array_obs_qty_table, &obs_data->qty_names); set_str_array_from_python(python_met_point_data, numpy_array_obs_var_table, &obs_data->var_names); - if (obs_data->qty_names.n() == 0) { + + check_obs_data(obs_data, use_var_id, method_name); + + if(mlog.verbosity_level()>=point_data_debug_level) { + print_met_data(met_pd_out.get_point_obs_data(), + met_pd_out.get_header_data(), method_name_s); + } + + // + // done + // + +return ( true ); + +} + + +//////////////////////////////////////////////////////////////////////// + +bool process_point_data_list(PyObject *python_point_data, MetPointDataPython &met_pd_out, + MaskFilters *filters) +{ + + bool use_var_id; + Observation obs; + time_t vld_time; + int hid, vid, qid, sid, typ_idx, vld_idx; + double lat, lon, elv, hgt, level, obs_value; + double prev_lat, prev_lon, prev_elv, prev_vld, prev_typ, prev_sid; + Python3_List list(python_point_data); + const char *method_name = "process_point_data_list -> "; + const char *method_name_s = "process_point_data_list()"; + + int obs_cnt = list.size(); + if (obs_cnt == 0) { mlog << Error << "\n" << method_name - << "The obs_qty_table is empty. Please check if python input is processed properly\n\n"; + << "The point observation data is empty. 
Please check if python input is processed properly\n\n"; exit (1); } - if (use_var_id && obs_data->var_names.n() == 0) { + + // + // initialize use_var_id to false + // + + use_var_id = false; + hid = -1; // starts from -1 to be 0 for the first header + prev_lat = prev_lon = prev_elv = bad_data_double; + prev_vld = prev_typ = prev_sid = bad_data_double; + + met_pd_out.allocate(obs_cnt); + MetPointHeader *header_data = met_pd_out.get_header_data(); + MetPointObsData *obs_data = met_pd_out.get_point_obs_data(); + + for (int j=0; jis_filtered(lat, lon)) continue; + if (filters->is_filtered_sid(obs.getStationId().c_str())) continue; + if (filters->is_filtered_typ(obs.getHeaderType().c_str())) continue; + } + + // get message type index + str_data = obs.getHeaderType(); + if ( !header_data->typ_array.has(str_data, typ_idx) ) { + header_data->typ_array.add(str_data); + header_data->typ_array.has(str_data, typ_idx); + } + + // get station ID index + str_data = obs.getStationId(); + if ( !header_data->sid_array.has(str_data, sid) ) { + header_data->sid_array.add(str_data); + header_data->sid_array.has(str_data, sid); + } + + // get valid time index + vld_time = obs.getValidTime(); + if ( !header_data->vld_num_array.has(vld_time, vld_idx) ) { + header_data->vld_num_array.add(vld_time); + header_data->vld_num_array.has(vld_time, vld_idx); + } + + if (!is_eq(prev_lat, lat) || !is_eq(prev_lon, lon) || !is_eq(prev_elv, elv) + || !is_eq(prev_sid, sid) || !is_eq(prev_typ, typ_idx) + || !is_eq(prev_vld, vld_idx)) { + header_data->lat_array.add(lat); + header_data->lon_array.add(lon); + header_data->elv_array.add(elv); + header_data->sid_idx_array.add(sid); + header_data->typ_idx_array.add(typ_idx); + header_data->vld_idx_array.add(vld_idx); + header_data->vld_array.add(obs.getValidTimeString()); + + prev_lat = lat; + prev_lon = lon; + prev_elv = elv; + prev_sid = sid; + prev_typ = typ_idx; + prev_vld = vld_idx; + hid++; + } + obs_data->obs_hids[j] = hid; + + // get the observation variable code + str_data = obs.getVarName(); + if ( use_var_id || !is_number(str_data.c_str()) ) { + use_var_id = true; + // update the list of variable names + if ( !obs_data->var_names.has(str_data, vid) ) { + obs_data->var_names.add(str_data); + obs_data->var_names.has(str_data, vid); + } + } + else { + vid = atoi(obs.getVarName().c_str()); + } + obs_data->obs_ids[j] = vid; + obs.setVarCode(vid); + + // get the quality flag index + str_data = obs.getQualityFlag(); + if ( !obs_data->qty_names.has(str_data, qid) ) { + obs_data->qty_names.add(str_data); + obs_data->qty_names.has(str_data, qid); + } + obs_data->obs_qids[j] = qid; + obs_data->obs_lvls[j] = obs.getPressureLevel(); + obs_data->obs_hgts[j] = obs.getHeight(); + obs_data->obs_vals[j] = obs.getValue(); + + } // for j + + met_pd_out.set_use_var_id(use_var_id); + mlog << Debug(9) << method_name << "use_var_id: \"" << use_var_id + << "\" from python. is_using_var_id(): " << met_pd_out.is_using_var_id() << "\n"; + + if (hid <= 0) { mlog << Error << "\n" << method_name - << "The obs_var_table is empty. Please check if python input is processed properly\n\n"; + << "The header is empty. 
Please check the python script and input\n\n"; exit (1); } + met_pd_out.set_hdr_cnt(hid); - if(mlog.verbosity_level()>=point_data_debug_level) print_met_data(obs_data, header_data, method_name_s); + check_obs_data(obs_data, use_var_id, method_name); + check_header_data(header_data, method_name); + + if(mlog.verbosity_level()>=point_data_debug_level) { + print_met_data(met_pd_out.get_point_obs_data(), + met_pd_out.get_header_data(), method_name_s); + } // // done // -return ( true ); + return ( true ); } @@ -239,17 +437,14 @@ return ( true ); bool straight_python_point_data(const char * script_name, int script_argc, char ** script_argv, - const bool use_xarray, MetPointDataPython &met_pd_out) + MetPointDataPython &met_pd_out, MaskFilters *filters) { int int_value; PyObject *module_obj = 0; -PyObject *module_dict_obj = 0; PyObject *python_value = 0; -PyObject *python_met_point_data = 0; ConcatString cs, user_dir, user_base; const char *method_name = "straight_python_point_data -> "; -const char *method_name_s = "straight_python_point_data()"; cs = script_name; @@ -349,9 +544,17 @@ if ( ! module_obj ) { } +bool result = false; +PyObject *met_point_data = get_python_object(module_obj, python_key_point_data); +if ( met_point_data ) { + result = process_point_data(met_point_data, met_pd_out); +} +else { + PyObject *point_data = get_python_object(module_obj, python_key_point_data_list); + result = process_point_data_list(point_data, met_pd_out, filters); +} -return process_python_point_data(module_obj, met_pd_out); - +return result; } @@ -359,7 +562,8 @@ return process_python_point_data(module_obj, met_pd_out); bool tmp_nc_point_obs(const char * user_script_name, int user_script_argc, - char ** user_script_argv, MetPointDataPython &met_pd_out) + char ** user_script_argv, MetPointDataPython &met_pd_out, + MaskFilters *filters) { @@ -402,6 +606,10 @@ command << cs_erase << replace_path(python_dir) << "\")"; run_python_string(command.text()); +mlog << Debug(0) << method_name << " -> added python path (" + << python_dir << ") to python interpreter\n"; + +//setenv(env_PYTHONPATH, python_dir.c_str(),1); mlog << Debug(3) << "Running user-specified python instance (MET_PYTHON_EXE=" << user_ppath << ") to run user's python script (" << user_script_name << ").\n"; @@ -509,8 +717,14 @@ if ( ! 
module_obj ) { // -process_python_point_data(module_obj, met_pd_out); - +PyObject *met_point_data = get_python_object(module_obj, python_key_point_data); +if ( met_point_data ) { + process_point_data(met_point_data, met_pd_out); +} +else { + PyObject *point_data = get_python_object(module_obj, python_key_point_data_list); + process_point_data_list(point_data, met_pd_out, filters); +} // // cleanup diff --git a/src/libcode/vx_pointdata_python/python_pointdata.h b/src/libcode/vx_pointdata_python/python_pointdata.h index 5bfb87ca2e..284421b17c 100644 --- a/src/libcode/vx_pointdata_python/python_pointdata.h +++ b/src/libcode/vx_pointdata_python/python_pointdata.h @@ -16,6 +16,7 @@ //////////////////////////////////////////////////////////////////////// +#include "mask_filters.h" #include "met_point_data.h" @@ -29,6 +30,7 @@ extern "C" { //////////////////////////////////////////////////////////////////////// static const char python_key_point_data [] = "met_point_data"; +static const char python_key_point_data_list[] = "point_data"; static const char python_key_nhdr [] = "nhdr"; //static const char python_key_npbhdr [] = "npbhdr"; @@ -62,8 +64,10 @@ static const int point_data_debug_level = 10; //////////////////////////////////////////////////////////////////////// -extern bool python_point_data(const char * script_name, int script_argc, char ** script_argv, - const bool use_xarray, MetPointDataPython &met_pd_out); +extern bool python_point_data(const char * script_name, int script_argc, + char ** script_argv, MetPointDataPython &met_pd_out, + MaskFilters *filters); + //extern bool python_point_data(const char *python_command, const bool use_xarray, // MetPointData & po_out); extern void print_met_data(MetPointObsData *obs_data, MetPointHeader *header_data, From a5a16fe7ee86d58b04d025eb91bd5da40f1277f4 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:26:38 -0600 Subject: [PATCH 25/81] #2285 Uses MaskFilters --- src/tools/other/ascii2nc/file_handler.cc | 58 +++--------------------- src/tools/other/ascii2nc/file_handler.h | 25 +++------- 2 files changed, 12 insertions(+), 71 deletions(-) diff --git a/src/tools/other/ascii2nc/file_handler.cc b/src/tools/other/ascii2nc/file_handler.cc index f2a062aa7f..74020804f0 100644 --- a/src/tools/other/ascii2nc/file_handler.cc +++ b/src/tools/other/ascii2nc/file_handler.cc @@ -53,14 +53,6 @@ FileHandler::FileHandler(const string &program_name) : _nhdr(0), _hdrNum(0), _obsNum(0), - _gridMaskNum(0), - _areaMaskNum(0), - _polyMaskNum(0), - _sidMaskNum(0), - _gridMask(0), - _areaMask(0), - _polyMask(0), - _sidMask(0), use_var_id(false), do_monitor(false), deflate_level(DEF_DEFLATE_LEVEL), @@ -121,11 +113,11 @@ bool FileHandler::writeNetcdfFile(const string &nc_filename) // List the number of rejected observations. 
mlog << Debug(2) - << "Rejected " << _gridMaskNum + << "Rejected " << filters.get_grid_mask_cnt() << " observations off the masking grid.\n" - << "Rejected " << _areaMaskNum + _polyMaskNum + << "Rejected " << filters.get_area_mask_cnt() + filters.get_poly_mask_cnt() << " observations outside the masking polyline.\n" - << "Rejected " << _sidMaskNum + << "Rejected " << filters.get_sid_mask_cnt() << " observations not matched with station ID's.\n"; // Loop through the observations, counting the number of headers needed in @@ -274,52 +266,14 @@ bool FileHandler::_addObservations(const Observation &obs) double grid_x, grid_y; // - // Apply the grid mask + // Apply the grid mask, the area mask, and the polyline mask // - if(_gridMask) { - _gridMask->latlon_to_xy(obs.getLatitude(), -1.0*obs.getLongitude(), - grid_x, grid_y); - - if(grid_x < 0 || grid_x >= _gridMask->nx() || - grid_y < 0 || grid_y >= _gridMask->ny()) { - _gridMaskNum++; - return false; - } - - // - // Apply the area mask - // - if(_areaMask) { - if(!_areaMask->s_is_on(nint(grid_x), nint(grid_y))) { - _areaMaskNum++; - return false; - } - } - } - - // - // Apply the polyline mask - // - if(_polyMask) - { - if(!_polyMask->latlon_is_inside_dege(obs.getLatitude(), obs.getLongitude())) - { - _polyMaskNum++; - return false; - } - } + if(filters.is_filtered(obs.getLatitude(), obs.getLongitude())) return false; // // Apply the station ID mask // - if(_sidMask) - { - if(!_sidMask->has(obs.getStationId().c_str())) - { - _sidMaskNum++; - return false; - } - } + if(filters.is_filtered_sid(obs.getStationId().c_str())) return false; // Save obs because the obs vector is sorted after time summary _observations.push_back(obs); diff --git a/src/tools/other/ascii2nc/file_handler.h b/src/tools/other/ascii2nc/file_handler.h index 006f965d3e..ece575672a 100644 --- a/src/tools/other/ascii2nc/file_handler.h +++ b/src/tools/other/ascii2nc/file_handler.h @@ -25,6 +25,7 @@ #include #include "mask_poly.h" +#include "mask_filters.h" #include "vx_grid.h" #include "vx_config.h" #include "vx_util.h" @@ -92,15 +93,7 @@ class FileHandler int _hdrNum; int _obsNum; - int _gridMaskNum; - int _areaMaskNum; - int _polyMaskNum; - int _sidMaskNum; - - Grid *_gridMask; - MaskPlane *_areaMask; - MaskPoly *_polyMask; - StringArray *_sidMask; + MaskFilters filters; map _messageTypeMap; @@ -149,20 +142,14 @@ class FileHandler void _closeNetcdf(); bool _openNetcdf(const string &nc_filename); -// bool _writeHdrInfo(const ConcatString &hdr_typ, -// const ConcatString &hdr_sid, -// const time_t hdr_vld, -// double lat, double lon, double elv); -// bool _writeObsInfo(int gc, float prs, float hgt, float obs, -// const ConcatString &qty); void debug_print_observations(vector< Observation >, string); }; inline void FileHandler::setCompressionLevel(int compressoion_level) { deflate_level = compressoion_level; } -inline void FileHandler::setGridMask(Grid &g) { _gridMask = &g; } -inline void FileHandler::setAreaMask(MaskPlane &a) { _areaMask = &a; } -inline void FileHandler::setPolyMask(MaskPoly &p) { _polyMask = &p; } -inline void FileHandler::setSIDMask (StringArray &s) { _sidMask = &s; } +inline void FileHandler::setGridMask(Grid &g) { filters.set_grid_mask(&g); } +inline void FileHandler::setAreaMask(MaskPlane &a) { filters.set_area_mask(&a); } +inline void FileHandler::setPolyMask(MaskPoly &p) { filters.set_poly_mask(&p); } +inline void FileHandler::setSIDMask (StringArray &s) { filters.set_sid_mask(&s); } inline void FileHandler::setMessageTypeMap(map m) { _messageTypeMap = m; } 
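For context between the two patches above: the MaskFilters class that file_handler.cc and python_pointdata.cc now call into is defined in mask_filters.h, which is not included in this patch series. The sketch below is a minimal reconstruction inferred only from the call sites shown here (the set_*_mask() setters, is_filtered(), is_filtered_sid(), is_filtered_typ(), and the get_*_mask_cnt() counters). The member names, the in-class initializers, and the typ-mask setter are assumptions for illustration, not the actual implementation.

#include "mask_poly.h"
#include "vx_grid.h"
#include "vx_util.h"

class MaskFilters {

   public:

      void set_grid_mask(Grid *g)        { _grid_mask = g; }
      void set_area_mask(MaskPlane *a)   { _area_mask = a; }
      void set_poly_mask(MaskPoly *p)    { _poly_mask = p; }
      void set_sid_mask (StringArray *s) { _sid_mask  = s; }
      void set_typ_mask (StringArray *t) { _typ_mask  = t; }  // assumed, by symmetry with the SID mask

      // Grid, area, and polyline checks moved out of FileHandler::_addObservations();
      // returns true (and counts the rejection) when the point is filtered out
      bool is_filtered(double lat, double lon) {
         double grid_x, grid_y;
         if(_grid_mask) {
            _grid_mask->latlon_to_xy(lat, -1.0*lon, grid_x, grid_y);
            if(grid_x < 0 || grid_x >= _grid_mask->nx() ||
               grid_y < 0 || grid_y >= _grid_mask->ny()) {
               _grid_mask_cnt++;
               return true;
            }
            if(_area_mask && !_area_mask->s_is_on(nint(grid_x), nint(grid_y))) {
               _area_mask_cnt++;
               return true;
            }
         }
         if(_poly_mask && !_poly_mask->latlon_is_inside_dege(lat, lon)) {
            _poly_mask_cnt++;
            return true;
         }
         return false;
      }

      // Station ID and message type checks
      bool is_filtered_sid(const char *sid) {
         if(_sid_mask && !_sid_mask->has(sid)) { _sid_mask_cnt++; return true; }
         return false;
      }
      bool is_filtered_typ(const char *typ) {
         if(_typ_mask && !_typ_mask->has(typ)) { _typ_mask_cnt++; return true; }
         return false;
      }

      // Rejection counters reported by FileHandler::writeNetcdfFile()
      int get_grid_mask_cnt() const { return _grid_mask_cnt; }
      int get_area_mask_cnt() const { return _area_mask_cnt; }
      int get_poly_mask_cnt() const { return _poly_mask_cnt; }
      int get_sid_mask_cnt()  const { return _sid_mask_cnt;  }

   private:

      Grid        *_grid_mask = nullptr;
      MaskPlane   *_area_mask = nullptr;
      MaskPoly    *_poly_mask = nullptr;
      StringArray *_sid_mask  = nullptr;
      StringArray *_typ_mask  = nullptr;

      int _grid_mask_cnt = 0;
      int _area_mask_cnt = 0;
      int _poly_mask_cnt = 0;
      int _sid_mask_cnt  = 0;
      int _typ_mask_cnt  = 0;
};

Whatever the real definition looks like, centralizing the masks this way lets ascii2nc and the new python_pointdata.cc code path apply identical filtering and report identical rejection counts, instead of each caller carrying its own mask pointers and counters.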
From c80eceab4de376d5b148f725b63c0c249b85d2f3 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:27:31 -0600 Subject: [PATCH 26/81] #2285 Removed xarray --- src/libcode/vx_pointdata_python/pointdata_python.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libcode/vx_pointdata_python/pointdata_python.cc b/src/libcode/vx_pointdata_python/pointdata_python.cc index d349ec9bb8..fc5cfcc858 100644 --- a/src/libcode/vx_pointdata_python/pointdata_python.cc +++ b/src/libcode/vx_pointdata_python/pointdata_python.cc @@ -172,7 +172,9 @@ file_name = full_path; file_name.chomp(".py"); // remove possible ".py" suffix from script filename -bool status = python_point_data(file_name.c_str(), file_argc, file_argv, use_xarray, met_data); +MaskFilters *filters = 0; +bool status = python_point_data(file_name.c_str(), file_argc, file_argv, + met_data, filters); met_data.get_hdr_cnt(); met_data.get_obs_cnt(); From 1c7f41e938c74c414da8d0159c718be41cae0de2 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:28:18 -0600 Subject: [PATCH 27/81] #2285 Adjusted the path for python scripts --- internal/test_unit/xml/unit_gen_vx_mask.xml | 4 +- internal/test_unit/xml/unit_python.xml | 84 ++++++++++----------- 2 files changed, 44 insertions(+), 44 deletions(-) diff --git a/internal/test_unit/xml/unit_gen_vx_mask.xml b/internal/test_unit/xml/unit_gen_vx_mask.xml index 342721af33..b83bb9a033 100644 --- a/internal/test_unit/xml/unit_gen_vx_mask.xml +++ b/internal/test_unit/xml/unit_gen_vx_mask.xml @@ -500,8 +500,8 @@ PYTHON_NUMPY \ &OUTPUT_DIR;/gen_vx_mask/PYTHON_FCST_or_OBS_mask.nc \ -type data \ - -input_field 'name="&MET_BASE;/python/read_ascii_numpy.py &MET_DATA;/python/fcst.txt FCST";' \ - -mask_field 'name="&MET_BASE;/python/read_ascii_numpy.py &MET_DATA;/python/obs.txt OBS";' \ + -input_field 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &MET_DATA;/python/fcst.txt FCST";' \ + -mask_field 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &MET_DATA;/python/obs.txt OBS";' \ -thresh gt0 -union -v 3 diff --git a/internal/test_unit/xml/unit_python.xml b/internal/test_unit/xml/unit_python.xml index 5a519d9212..051f709a62 100644 --- a/internal/test_unit/xml/unit_python.xml +++ b/internal/test_unit/xml/unit_python.xml @@ -31,7 +31,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_name.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Grid Name: 'G212'" \ -v 1 @@ -53,7 +53,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_string.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Grid String: 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N'" \ -v 1 @@ -74,7 +74,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_data_file.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Gridded Data File: 'wrfprs_ruc13_12.tm00_G212'" \ -v 1 @@ -90,7 +90,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt 
LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Python enabled numpy plot_data_plane" \ -v 1 @@ -105,7 +105,7 @@ \ PYTHON_XARRAY \ &OUTPUT_DIR;/python/letter_xarray.ps \ - 'name = "&MET_BASE;/python/read_ascii_xarray.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_xarray.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Python enabled xarray plot_data_plane" \ -v 1 @@ -120,7 +120,7 @@ \ &DATA_DIR_PYTHON;/letter.txt \ &OUTPUT_DIR;/python/letter_file_type.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG LETTER"; file_type=PYTHON_NUMPY;' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG LETTER"; file_type=PYTHON_NUMPY;' \ -plot_range 0.0 255.0 \ -title "Python enabled plot_data_plane using file_type option" \ -v 1 @@ -133,7 +133,7 @@ &MET_BIN;/mode - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ &OUTPUT_DIR;/pcp_combine/arw-tom-gep0_2012040912_F030_APCP06.nc \ @@ -152,8 +152,8 @@ &MET_BIN;/mode - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -172,7 +172,7 @@ &MET_BIN;/grid_stat - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ &DATA_DIR_MODEL;/grib1/nam_st4/nam_2012040900_F012_gSt4.grib \ @@ -189,8 +189,8 @@ &MET_BIN;/grid_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -206,8 +206,8 @@ &MET_BIN;/point_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -224,8 +224,8 @@ &MET_BIN;/wavelet_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/obs.txt OBS \ PYTHON_NUMPY \ @@ -244,7 +244,7 @@ &MET_BIN;/wavelet_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST \ PYTHON_NUMPY \ @@ -266,7 +266,7 @@ PYTHON_NUMPY \ G130 \ &OUTPUT_DIR;/python/regrid_data_plane.nc \ - -field 'name="&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ + 
-field 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ -v 1 @@ -279,7 +279,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/shift_data_plane.nc \ - 'name="&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ + 'name="&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/fcst.txt FCST";' \ -from 30 -110 -to 35 -115 \ -v 1 @@ -293,7 +293,7 @@ \ &DATA_DIR_PYTHON;/fcst.txt \ &OUTPUT_DIR;/python/shift_data_plane_input_arg.nc \ - 'name="&MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; file_type=PYTHON_NUMPY;' \ + 'name="&MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; file_type=PYTHON_NUMPY;' \ -from 30 -110 -to 35 -115 \ -v 1 @@ -305,8 +305,8 @@ &MET_BIN;/series_analysis - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS \ -fcst &DATA_DIR_PYTHON;/fcst.txt &DATA_DIR_PYTHON;/fcst.txt \ @@ -324,8 +324,8 @@ &MET_BIN;/mtd - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS OUTPUT_PREFIX PYTHON \ @@ -348,8 +348,8 @@ &MET_BIN;/ensemble_stat - FCST_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST - OBS_COMMAND &MET_BASE;/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS + FCST_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST + OBS_COMMAND &MET_BASE;/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG OBS OUTPUT_PREFIX PYTHON \ @@ -369,7 +369,7 @@ &MET_BIN;/ascii2nc \ - "&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ + "&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ &OUTPUT_DIR;/python/ascii2nc_python.nc \ -format python @@ -382,7 +382,7 @@ &MET_BIN;/ascii2nc \ - "&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs_varname.txt" \ + "&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs_varname.txt" \ &OUTPUT_DIR;/python/ascii2nc_python_varname.nc \ -format python @@ -395,7 +395,7 @@ &MET_BIN;/stat_analysis \ - -lookin python &MET_BASE;/python/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ + -lookin python &MET_BASE;/python/examples/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ -job aggregate_stat -line_type MPR -out_line_type sl1l2 -by FCST_VAR \ -out_stat &OUTPUT_DIR;/python/stat_analysis_python_AGGR_MPR_to_SL1L2.stat @@ -415,7 +415,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_numpy_grid_name_user_python.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Grid Name: 'G212'" \ -v 1 @@ -433,7 +433,7 @@ MET_PYTHON_EXE &MET_PYTHON_EXE; \ - "&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ + 
"&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/sample_ascii_obs.txt" \ &OUTPUT_DIR;/python/ascii2nc_user_python.nc \ -format python @@ -453,7 +453,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/letter_user_python.ps \ - 'name = "&MET_BASE;/python/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + 'name = "&MET_BASE;/python/examples/read_ascii_numpy.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ -title "Python enabled plot_data_plane" \ -v 1 @@ -471,7 +471,7 @@ MET_PYTHON_EXE &MET_PYTHON_EXE; \ - -lookin python &MET_BASE;/python/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ + -lookin python &MET_BASE;/python/examples/read_ascii_mpr.py &OUTPUT_DIR;/python/point_stat_120000L_20050807_120000V.stat \ -job aggregate_stat -line_type MPR -out_line_type sl1l2 -by FCST_VAR \ -out_stat &OUTPUT_DIR;/python/stat_analysis_user_python_AGGR_MPR_to_SL1L2.stat @@ -483,7 +483,7 @@ &MET_BIN;/point2grid \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ G212 \ &OUTPUT_DIR;/python/pb2nc_TMP.nc \ -field 'name="TMP"; level="*"; valid_time="20120409_120000"; censor_thresh=[ <0 ]; censor_val=[0];' \ @@ -502,7 +502,7 @@ MET_PYTHON_EXE &MET_PYTHON_EXE; \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ G212 \ &OUTPUT_DIR;/python/pb2nc_TMP_user_python.nc \ -field 'name="TMP"; level="*"; valid_time="20120409_120000"; censor_thresh=[ <0 ]; censor_val=[0];' \ @@ -520,7 +520,7 @@ TO_GRID NONE \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_ascii_point.py &MET_DATA;/sample_obs/ascii/precip24_2010010112.ascii' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_ascii_point.py &MET_DATA;/sample_obs/ascii/precip24_2010010112.ascii' \ &OUTPUT_DIR;/python/precip24_2010010112.ps \ -config &CONFIG_DIR;/PlotPointObsConfig \ -plot_grid &DATA_DIR_MODEL;/grib2/nam/nam_2012040900_F012.grib2 \ @@ -538,9 +538,9 @@ TO_GRID NONE \ - 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc' \ &OUTPUT_DIR;/python/nam_and_ndas.20120409.t12z.prepbufr_CONFIG.ps \ - -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/trmm_2012040912_3hr.nc' \ + -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/trmm_2012040912_3hr.nc' \ -plot_grid &DATA_DIR_MODEL;/grib2/nam/nam_2012040900_F012.grib2 \ -config &CONFIG_DIR;/PlotPointObsConfig \ -title "NAM 2012040900 F12 vs NDAS 500mb RH and TRMM 3h > 0" \ @@ -570,7 +570,7 @@ &OUTPUT_DIR;/python/ensemble_stat/input_file_list \ &CONFIG_DIR;/EnsembleStatConfig \ -grid_obs &DATA_DIR_OBS;/laps/laps_2012041012_F000.grib \ - -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/gauge_2012041012_24hr.nc' \ + -point_obs 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/ascii2nc/gauge_2012041012_24hr.nc' \ -outdir &OUTPUT_DIR;/python/ensemble_stat -v 1 @@ -595,7 +595,7 @@ \ &DATA_DIR_MODEL;/grib1/nam/nam_2012040900_F012.grib \ - 
'PYTHON_NUMPY=&MET_BASE;/python/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/gdas1.20120409.t12z.prepbufr.nc' \ + 'PYTHON_NUMPY=&MET_BASE;/python/examples/read_met_point_obs.py &OUTPUT_DIR;/pb2nc/gdas1.20120409.t12z.prepbufr.nc' \ &CONFIG_DIR;/PointStatConfig_WINDS \ -outdir &OUTPUT_DIR;/python -v 1 @@ -609,7 +609,7 @@ \ PYTHON_NUMPY \ &OUTPUT_DIR;/python/wrfout_d01_2008-08-08_12_00_00_PLEV_ZONAL_MEAN.ps \ - 'name="&MET_BASE;/python/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lat";' \ + 'name="&MET_BASE;/python/examples/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lat";' \ -title "WRF Zonal Mean" \ -v 1 @@ -622,7 +622,7 @@ &MET_BIN;/pcp_combine \ -add PYTHON_NUMPY \ - 'name="&MET_BASE;/python/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lon";' \ + 'name="&MET_BASE;/python/examples/derive_WRF_semilatlon.py &DATA_DIR_MODEL;/p_interp/wrfout_d01_2008-08-08_12:00:00_PLEV TT lon";' \ &OUTPUT_DIR;/python/wrfout_d01_2008-08-08_12_00_00_PLEV_MERIDIONAL_MEAN.nc \ -name "TT_MERIDIONAL_MEAN" -v 1 From c355f4b831ee6b6337e07ed872fafe5ed83c7930 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 7 Apr 2023 14:29:03 -0600 Subject: [PATCH 28/81] #2285 Adjusted the path for python scripts --- docs/Users_Guide/appendixF.rst | 12 ++++++------ docs/Users_Guide/installation.rst | 2 +- docs/Users_Guide/plotting.rst | 2 +- docs/Users_Guide/reformat_point.rst | 8 ++++---- docs/Users_Guide/stat-analysis.rst | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index c490cc07e3..877ac360da 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -259,7 +259,7 @@ The ASCII2NC tool supports the "-format python" option. With this option, point .. code-block:: none ascii2nc -format python \ - "MET_BASE/python/read_ascii_point.py sample_ascii_obs.txt" \ + "MET_BASE/python/examples/read_ascii_point.py sample_ascii_obs.txt" \ sample_ascii_obs_python.nc The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process point observations. They support Python embedding of point observations directly on the command line by replacing the input MET NetCDF point observation file name with the Python command to be run. The Python command must begin with the prefix 'PYTHON_NUMPY=' and be followed by the path to the User's Python script and any arguments. The full command should be enclosed in single quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below: @@ -267,14 +267,14 @@ The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process .. code-block:: none plot_point_obs \ - "PYTHON_NUMPY=MET_BASE/python/read_ascii_point.py sample_ascii_obs.txt" \ + "PYTHON_NUMPY=MET_BASE/python/examples/read_ascii_point.py sample_ascii_obs.txt" \ output_image.ps Both of the above examples use the **read_ascii_point.py** sample script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** sample script can be found in: -• MET installation directory in *MET_BASE/python*. +• MET installation directory in *MET_BASE/python/examples*. -• `MET GitHub repository `_ in *met/scripts/python*. +• `MET GitHub repository `_ in *met/scripts/python/examples*. .. 
_pyembed-mpr-data: @@ -285,6 +285,6 @@ The Stat-Analysis tool supports the "-lookin python" option. With this option, m The **read_ascii_mpr.py** sample script can be found in: -• MET installation directory in *MET_BASE/python*. +• MET installation directory in *MET_BASE/python/examples*. -• `MET GitHub repository `_ in *met/scripts/python*. +• `MET GitHub repository `_ in *met/scripts/python/examples*. diff --git a/docs/Users_Guide/installation.rst b/docs/Users_Guide/installation.rst index 9db4d6993f..b3c04826dc 100644 --- a/docs/Users_Guide/installation.rst +++ b/docs/Users_Guide/installation.rst @@ -122,7 +122,7 @@ MET Directory Structure The top-level MET directory consists of Makefiles, configuration files, and several subdirectories. The top-level Makefile and configuration files control how the entire toolkit is built. Instructions for using these files to build MET can be found in :numref:`Install_Building-the-MET`. -When MET has been successfully built and installed, the installation directory contains two subdirectories. The *bin/* directory contains executables for each module of MET as well as several plotting utilities. The *share/met/* directory contains many subdirectories with data required at runtime and a subdirectory of sample R scripts utilities. The *colortables/*, *map/*, and *ps/* subdirectories contain data used in creating PostScript plots for several MET tools. The *poly/* subdirectory contains predefined lat/lon polyline regions for use in selecting regions over which to verify. The polylines defined correspond to verification regions used by NCEP as described in :numref:`Appendix B, Section %s `. The *config/* directory contains default configuration files for the MET tools. The *python/* subdirectory contains sample scripts used in Python embedding (:numref:`Appendix F, Section %s `). The *table_files/* and *tc_data/* subdirectories contain GRIB table definitions and tropical cyclone data, respectively. The *Rscripts/* subdirectory contains a handful of plotting graphic utilities for MET-TC. These are the same Rscripts that reside under the top-level MET *scripts/Rscripts* directory, other than it is the installed location. The *wrappers/* subdirectory contains code used in Python embedding (:numref:`Appendix F, Section %s `). +When MET has been successfully built and installed, the installation directory contains two subdirectories. The *bin/* directory contains executables for each module of MET as well as several plotting utilities. The *share/met/* directory contains many subdirectories with data required at runtime and a subdirectory of sample R scripts utilities. The *colortables/*, *map/*, and *ps/* subdirectories contain data used in creating PostScript plots for several MET tools. The *poly/* subdirectory contains predefined lat/lon polyline regions for use in selecting regions over which to verify. The polylines defined correspond to verification regions used by NCEP as described in :numref:`Appendix B, Section %s `. The *config/* directory contains default configuration files for the MET tools. The *python/* subdirectory contains python scripts. The *python/examples* subdirectory contains sample scripts used in Python embedding (:numref:`Appendix F, Section %s `). The *python/pyembed/* subdirectory contains code used in Python embedding (:numref:`Appendix F, Section %s `). The *table_files/* and *tc_data/* subdirectories contain GRIB table definitions and tropical cyclone data, respectively. 
The *Rscripts/* subdirectory contains a handful of plotting graphic utilities for MET-TC. These are the same Rscripts that reside under the top-level MET *scripts/Rscripts* directory, other than it is the installed location. The *data/* directory contains several configuration and static data files used by MET. The *sample_fcst/* and *sample_obs/* subdirectories contain sample data used by the test scripts provided in the *scripts/* directory. diff --git a/docs/Users_Guide/plotting.rst b/docs/Users_Guide/plotting.rst index 1db3b4be91..1ac44e2f7e 100644 --- a/docs/Users_Guide/plotting.rst +++ b/docs/Users_Guide/plotting.rst @@ -71,7 +71,7 @@ An equivalent command using python embedding for point observations is shown bel .. code-block:: none - plot_point_obs 'PYTHON_NUMPY=MET_BASE/python/read_met_point_obs.py sample_pb.nc' sample_data.ps + plot_point_obs 'PYTHON_NUMPY=MET_BASE/python/examples/read_met_point_obs.py sample_pb.nc' sample_data.ps Please see section :numref:`pyembed-point-obs-data` for more details about Python embedding in MET. diff --git a/docs/Users_Guide/reformat_point.rst b/docs/Users_Guide/reformat_point.rst index 1cd9b4705d..809639c249 100644 --- a/docs/Users_Guide/reformat_point.rst +++ b/docs/Users_Guide/reformat_point.rst @@ -1042,7 +1042,7 @@ Required arguments for point2grid 1. The **input_filename** argument indicates the name of the input file to be processed. The input can be a MET NetCDF point observation file generated by other MET tools or a NetCDF AOD dataset from GOES16/17. Python embedding for point observations is also supported, as described in :numref:`pyembed-point-obs-data`. -The MET point observation NetCDF file name as **input_filename** argument is equivalent with "PYTHON_NUMPY=MET_BASE/python/read_met_point_obs.py netcdf_file name'. +The MET point observation NetCDF file name as **input_filename** argument is equivalent to "PYTHON_NUMPY=MET_BASE/python/examples/read_met_point_obs.py netcdf_filename". 2. The **to_grid** argument defines the output grid as: (1) a named grid, (2) the path to a gridded data file, or (3) an explicit grid specification string. @@ -1100,7 +1100,7 @@ Listed below is an example of processing the same set of observations but using .. code-block:: none point2grid \ - 'PYTHON_NUMPY=MET_BASE/python/read_met_point_obs.py ascii2nc_edr_hourly.20130827.nc' \ + 'PYTHON_NUMPY=MET_BASE/python/examples/read_met_point_obs.py ascii2nc_edr_hourly.20130827.nc' \ G212 python_gridded_ascii_python.nc -config Point2GridConfig_edr \ -field 'name="200"; level="*"; valid_time="20130827_205959";' -method MAX -v 1 @@ -1191,10 +1191,10 @@ The script can be found at: .. code-block:: none - MET_BASE/utility/print_pointnc2ascii.py + MET_BASE/python/utility/print_pointnc2ascii.py For how to use the script, issue the command: .. code-block:: none - python3 MET_BASE/utility/print_pointnc2ascii.py -h + python3 MET_BASE/python/utility/print_pointnc2ascii.py -h diff --git a/docs/Users_Guide/stat-analysis.rst b/docs/Users_Guide/stat-analysis.rst index 1c1f1db4c0..602920c22d 100644 --- a/docs/Users_Guide/stat-analysis.rst +++ b/docs/Users_Guide/stat-analysis.rst @@ -323,7 +323,7 @@ The example below uses Python embedding. .. 
code-block:: none stat_analysis \ - -lookin python MET_BASE/python/read_ascii_mpr.py point_stat_mpr.txt \ + -lookin python MET_BASE/python/examples/read_ascii_mpr.py point_stat_mpr.txt \ -job aggregate_stat -line_type MPR -out_line_type CNT \ -by FCST_VAR,FCST_LEV From d6086ece3ffd1264d2fce1db8c585c8bc3c5e18c Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 10 Apr 2023 10:47:47 -0600 Subject: [PATCH 29/81] #2285 Import importlib.util instead of importlib --- scripts/python/pyembed/python_embedding.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/python/pyembed/python_embedding.py b/scripts/python/pyembed/python_embedding.py index f627b6ccd9..be01188c8f 100644 --- a/scripts/python/pyembed/python_embedding.py +++ b/scripts/python/pyembed/python_embedding.py @@ -18,7 +18,7 @@ import os import sys -import importlib +import importlib.util class pyembed_tools(): From 3074aa8ed7c71a33496588abbd4738e760a150b8 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 10 Apr 2023 12:19:01 -0600 Subject: [PATCH 30/81] #2285 Change back --- scripts/python/pyembed/python_embedding.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/python/pyembed/python_embedding.py b/scripts/python/pyembed/python_embedding.py index be01188c8f..f627b6ccd9 100644 --- a/scripts/python/pyembed/python_embedding.py +++ b/scripts/python/pyembed/python_embedding.py @@ -18,7 +18,7 @@ import os import sys -import importlib.util +import importlib class pyembed_tools(): From ddfe1c355f001b9e4fb64ce4b385068b5fdddde1 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 10 Apr 2023 13:38:06 -0600 Subject: [PATCH 31/81] #2285 Changed importing importlib --- scripts/python/pyembed/python_embedding.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/python/pyembed/python_embedding.py b/scripts/python/pyembed/python_embedding.py index f627b6ccd9..434679b185 100644 --- a/scripts/python/pyembed/python_embedding.py +++ b/scripts/python/pyembed/python_embedding.py @@ -18,7 +18,7 @@ import os import sys -import importlib +from importlib import util as import_util class pyembed_tools(): @@ -78,8 +78,8 @@ def call_python(argv): user_base = os.path.basename(pyembed_module_name).replace('.py','') - spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) - met_in = importlib.util.module_from_spec(spec) + spec = import_util.spec_from_file_location(user_base, pyembed_module_name) + met_in = import_util.module_from_spec(spec) spec.loader.exec_module(met_in) return met_in From 2feccc94fd8145164b11aa151a0746b1cadb8a6b Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Mon, 10 Apr 2023 14:20:24 -0600 Subject: [PATCH 32/81] #2285 Merged scripts/python/met_point_obs_nc.py into scripts/python/met/point.py --- scripts/python/met_point_obs_nc.py | 281 ----------------------------- 1 file changed, 281 deletions(-) delete mode 100644 scripts/python/met_point_obs_nc.py diff --git a/scripts/python/met_point_obs_nc.py b/scripts/python/met_point_obs_nc.py deleted file mode 100644 index e6680c0689..0000000000 --- a/scripts/python/met_point_obs_nc.py +++ /dev/null @@ -1,281 +0,0 @@ -#!/usr/bin/env python3 - -''' -Separated from read_met_point_obs on Feb 09, 2023 - -@author: hsoh - -This script reads the MET point observation NetCDF file like MET tools do. 
-''' - -import os -import sys -from datetime import datetime -import numpy as np -import netCDF4 as nc - -from met_point_obs import met_point_obs, base_met_point_obs, get_prompt - -DO_PRINT_DATA = False -ARG_PRINT_DATA = 'show_data' - -# Note: caller should import netCDF4 -# the argements nc_group(dataset) and nc_var should not be None -class nc_tools(): - - met_missing = -99999999. - - @staticmethod - def get_num_array(nc_group, var_name): - nc_var = nc_group.variables.get(var_name, None) - return [] if nc_var is None else nc_var[:] - - @staticmethod - def get_ncbyte_array_to_str(nc_var): - nc_str_data = nc_var[:] - if nc_var.datatype.name == 'bytes8': - nc_str_data = [ str(s.compressed(),"utf-8") for s in nc_var[:] ] - return nc_str_data - - @staticmethod - def get_string_array(nc_group, var_name): - nc_var = nc_group.variables.get(var_name, None) - return [] if nc_var is None else nc_tools.get_ncbyte_array_to_str(nc_var) - - -class nc_point_obs(met_point_obs): - - # args should be string, list, or dictionary - def get_nc_filename(self, args): - nc_filename = None - if isinstance(args, dict): - nc_filename = args.get('nc_name',None) - elif isinstance(args, list): - nc_filename = args[0] - elif args != ARG_PRINT_DATA: - nc_filename = args - - return nc_filename - - def read_data(self, nc_filename): - if nc_filename is None: - self.log_error_msg("The input NetCDF filename is missing") - elif not os.path.exists(nc_filename): - self.log_error_msg(f"input NetCDF file ({nc_filename}) does not exist") - else: - dataset = nc.Dataset(nc_filename, 'r') - - attr_name = 'use_var_id' - use_var_id_str = dataset.getncattr(attr_name) if attr_name in dataset.ncattrs() else "false" - self.use_var_id = use_var_id_str.lower() == 'true' - - # Header - self.hdr_typ = dataset['hdr_typ'][:] - self.hdr_sid = dataset['hdr_sid'][:] - self.hdr_vld = dataset['hdr_vld'][:] - self.hdr_lat = dataset['hdr_lat'][:] - self.hdr_lon = dataset['hdr_lon'][:] - self.hdr_elv = dataset['hdr_elv'][:] - self.hdr_typ_table = nc_tools.get_string_array(dataset, 'hdr_typ_table') - self.hdr_sid_table = nc_tools.get_string_array(dataset, 'hdr_sid_table') - self.hdr_vld_table = nc_tools.get_string_array(dataset, 'hdr_vld_table') - - nc_var = dataset.variables.get('obs_unit', None) - if nc_var: - self.obs_var_unit = nc_var[:] - nc_var = dataset.variables.get('obs_desc', None) - if nc_var: - self.obs_var_desc = nc_var[:] - - nc_var = dataset.variables.get('hdr_prpt_typ', None) - if nc_var: - self.hdr_prpt_typ = nc_var[:] - nc_var = dataset.variables.get('hdr_irpt_typ', None) - if nc_var: - self.hdr_irpt_typ = nc_var[:] - nc_var = dataset.variables.get('hdr_inst_typ', None) - if nc_var: - self.hdr_inst_typ =nc_var[:] - - #Observation data - self.hdr_sid = dataset['hdr_sid'][:] - self.obs_qty = np.array(dataset['obs_qty'][:]) - self.obs_hid = np.array(dataset['obs_hid'][:]) - self.obs_lvl = np.array(dataset['obs_lvl'][:]) - self.obs_hgt = np.array(dataset['obs_hgt'][:]) - self.obs_val = np.array(dataset['obs_val'][:]) - nc_var = dataset.variables.get('obs_vid', None) - if nc_var is None: - self.use_var_id = False - nc_var = dataset.variables.get('obs_gc', None) - else: - self.obs_var_table = nc_tools.get_string_array(dataset, 'obs_var') - if nc_var: - self.obs_vid = np.array(nc_var[:]) - - self.obs_qty_table = nc_tools.get_string_array(dataset, 'obs_qty_table') - - def save_ncfile(self, nc_filename): - met_data = self.get_point_data() - with nc.Dataset(nc_filename, 'w') as nc_dataset: - self.set_nc_data(nc_dataset) - return met_data - - def 
set_nc_data(self, nc_dataset): - return nc_point_obs.write_nc_data(nc_dataset, self) - - @staticmethod - def write_nc_file(nc_filename, point_obs): - with nc.Dataset(nc_filename, 'w') as nc_dataset: - nc_point_obs.set_nc_data(nc_dataset, point_obs) - - @staticmethod - def write_nc_data(nc_dataset, point_obs): - do_nothing = False - if 0 == point_obs.nhdr: - do_nothing = True - base_met_point_obs.info_msg("the header is empty") - if 0 == point_obs.nobs: - do_nothing = True - base_met_point_obs.info_msg("the observation data is empty") - if do_nothing: - print() - return - - # Set global attributes - nc_dataset.MET_Obs_version = "1.02" ; - nc_dataset.use_var_id = "true" if point_obs.use_var_id else "false" - - # Create dimensions - nc_dataset.createDimension('mxstr', 16) - nc_dataset.createDimension('mxstr2', 40) - nc_dataset.createDimension('mxstr3', 80) - nc_dataset.createDimension('nhdr', point_obs.nhdr) - nc_dataset.createDimension('nobs', point_obs.nobs) - #npbhdr = len(point_obs.hdr_prpt_typ) - if 0 < point_obs.npbhdr: - nc_dataset.createDimension('npbhdr', point_obs.npbhdr) - nc_dataset.createDimension('nhdr_typ', point_obs.nhdr_typ) - nc_dataset.createDimension('nhdr_sid', point_obs.nhdr_sid) - nc_dataset.createDimension('nhdr_vld', point_obs.nhdr_vld) - nc_dataset.createDimension('nobs_qty', point_obs.nobs_qty) - nc_dataset.createDimension('obs_var_num', point_obs.nobs_var) - - type_for_string = 'S1' # np.byte - dims_hdr = ('nhdr',) - dims_obs = ('nobs',) - - # Create header and observation variables - var_hdr_typ = nc_dataset.createVariable('hdr_typ', np.int32, dims_hdr, fill_value=-9999) - var_hdr_sid = nc_dataset.createVariable('hdr_sid', np.int32, dims_hdr, fill_value=-9999) - var_hdr_vld = nc_dataset.createVariable('hdr_vld', np.int32, dims_hdr, fill_value=-9999) - var_hdr_lat = nc_dataset.createVariable('hdr_lat', np.float32, dims_hdr, fill_value=-9999.) - var_hdr_lon = nc_dataset.createVariable('hdr_lon', np.float32, dims_hdr, fill_value=-9999.) - var_hdr_elv = nc_dataset.createVariable('hdr_elv', np.float32, dims_hdr, fill_value=-9999.) - - var_obs_qty = nc_dataset.createVariable('obs_qty', np.int32, dims_obs, fill_value=-9999) - var_obs_hid = nc_dataset.createVariable('obs_hid', np.int32, dims_obs, fill_value=-9999) - var_obs_vid = nc_dataset.createVariable('obs_vid', np.int32, dims_obs, fill_value=-9999) - var_obs_lvl = nc_dataset.createVariable('obs_lvl', np.float32, dims_obs, fill_value=-9999.) - var_obs_hgt = nc_dataset.createVariable('obs_hgt', np.float32, dims_obs, fill_value=-9999.) - var_obs_val = nc_dataset.createVariable('obs_val', np.float32, dims_obs, fill_value=-9999.) - - if 0 == point_obs.npbhdr: - var_hdr_prpt_typ = None - var_hdr_irpt_typ = None - var_hdr_inst_typ = None - else: - dims_npbhdr = ('npbhdr',) - var_hdr_prpt_typ = nc_dataset.createVariable('hdr_prpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) - var_hdr_irpt_typ = nc_dataset.createVariable('hdr_irpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) - var_hdr_inst_typ = nc_dataset.createVariable('hdr_inst_typ', np.int32, dims_npbhdr, fill_value=-9999.) 
- - var_hdr_typ_table = nc_dataset.createVariable('hdr_typ_table', type_for_string, ('nhdr_typ','mxstr2')) - var_hdr_sid_table = nc_dataset.createVariable('hdr_sid_table', type_for_string, ('nhdr_sid','mxstr2')) - var_hdr_vld_table = nc_dataset.createVariable('hdr_vld_table', type_for_string, ('nhdr_vld','mxstr')) - var_obs_qty_table = nc_dataset.createVariable('obs_qty_table', type_for_string, ('nobs_qty','mxstr')) - var_obs_var_table = nc_dataset.createVariable('obs_var', type_for_string, ('obs_var_num','mxstr2')) - var_obs_var_unit = nc_dataset.createVariable('obs_unit', type_for_string, ('obs_var_num','mxstr2')) - var_obs_var_desc = nc_dataset.createVariable('obs_desc', type_for_string, ('obs_var_num','mxstr3')) - - # Set variables - var_hdr_typ[:] = point_obs.hdr_typ[:] - var_hdr_sid[:] = point_obs.hdr_sid[:] - var_hdr_vld[:] = point_obs.hdr_vld[:] - var_hdr_lat[:] = point_obs.hdr_lat[:] - var_hdr_lon[:] = point_obs.hdr_lon[:] - var_hdr_elv[:] = point_obs.hdr_elv[:] - for i in range(0, point_obs.nhdr_typ): - for j in range(0, len(point_obs.hdr_typ_table[i])): - var_hdr_typ_table[i,j] = point_obs.hdr_typ_table[i][j] - for i in range(0, point_obs.nhdr_sid): - for j in range(0, len(point_obs.hdr_sid_table[i])): - var_hdr_sid_table[i,j] = point_obs.hdr_sid_table[i][j] - for i in range(0, point_obs.nhdr_vld): - for j in range(0, len(point_obs.hdr_vld_table[i])): - var_hdr_vld_table[i,j] = point_obs.hdr_vld_table[i][j] - if 0 < point_obs.npbhdr: - var_hdr_prpt_typ[:] = point_obs.hdr_prpt_typ[:] - var_hdr_irpt_typ[:] = point_obs.hdr_irpt_typ[:] - var_hdr_inst_typ[:] = point_obs.hdr_inst_typ[:] - - var_obs_qty[:] = point_obs.obs_qty[:] - var_obs_hid[:] = point_obs.obs_hid[:] - var_obs_vid[:] = point_obs.obs_vid[:] - var_obs_lvl[:] = point_obs.obs_lvl[:] - var_obs_hgt[:] = point_obs.obs_hgt[:] - var_obs_val[:] = point_obs.obs_val[:] - for i in range(0, point_obs.nobs_var): - for j in range(0, len(point_obs.obs_var_table[i])): - var_obs_var_table[i,j] = point_obs.obs_var_table[i][j] - var_obs_var_unit[i] = "" if i >= len(point_obs.obs_var_unit) else point_obs.obs_var_unit[i] - var_obs_var_desc[i] = "" if i >= len(point_obs.obs_var_desc) else point_obs.obs_var_desc[i] - for i in range(0, point_obs.nobs_qty): - for j in range(0, len(point_obs.obs_qty_table[i])): - var_obs_qty_table[i,j] = point_obs.obs_qty_table[i][j] - - # Set variable attributes - var_hdr_typ.long_name = "index of message type" - var_hdr_sid.long_name = "index of station identification" - var_hdr_vld.long_name = "index of valid time" - var_hdr_lat.long_name = "latitude" - var_hdr_lat.units = "degrees_north" - var_hdr_lon.long_name = "longitude" - var_hdr_lon.units = "degrees_east" - var_hdr_elv.long_name = "elevation" - var_hdr_elv.units = "meters above sea level (msl)" - - var_obs_qty.long_name = "index of quality flag" - var_obs_hid.long_name = "index of matching header data" - var_obs_vid.long_name = "index of BUFR variable corresponding to the observation type" - var_obs_lvl.long_name = "pressure level (hPa) or accumulation interval (sec)" - var_obs_hgt.long_name = "height in meters above sea level (msl)" - var_obs_val.long_name = "observation value" - var_hdr_typ_table.long_name = "message type" - var_hdr_sid_table.long_name = "station identification" - var_hdr_vld_table.long_name = "valid time" - var_hdr_vld_table.units = "YYYYMMDD_HHMMSS UTC" - var_obs_qty_table.long_name = "quality flag" - var_obs_var_table.long_name = "variable names" - var_obs_var_unit.long_name = "variable units" - var_obs_var_desc.long_name = 
"variable descriptions" - - -def main(argv): - if len(argv) != 1 and argv[1] != ARG_PRINT_DATA: - netcdf_filename = argv[1] - tmp_nc_name = 'tmp_met_point.nc' - point_obs_data = nc_point_obs() - point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename)) - met_point_data = point_obs_data.save_ncfile(tmp_nc_name) - print(f'{get_prompt()} saved met_point_data to {tmp_nc_name}') - met_point_data['met_point_data'] = point_obs_data - - if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]: - met_point_obs.print_point_data(met_point_data) - -if __name__ == '__main__': - start_time = datetime.now() - main(sys.argv) - run_time = datetime.now() - start_time - print(f'{get_prompt()} Done python script {sys.argv[0]} took {run_time}') From 09aaf11207a4d15d9b745685a25c782a71e91432 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 09:28:15 -0600 Subject: [PATCH 33/81] Fixes typo in installation chapter. --- docs/Users_Guide/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Users_Guide/installation.rst b/docs/Users_Guide/installation.rst index b3c04826dc..534c5dece4 100644 --- a/docs/Users_Guide/installation.rst +++ b/docs/Users_Guide/installation.rst @@ -209,7 +209,7 @@ The following environment variables should also be set: MET_PYTHON_CC='-I/usr/include/python3.6' MET_PYTHON_LD='-L/usr/lib/python3.6/config-x86_64-linux-gnu -lpython3.6m' - Note that this version of Python must include support for a minimum set of required pacakges. For more information about Python support in MET, including the list of required packages, please refer to :numref:`Appendix F, Section %s `. + Note that this version of Python must include support for a minimum set of required packages. For more information about Python support in MET, including the list of required packages, please refer to :numref:`Appendix F, Section %s `. * If compiling MODIS-Regrid and/or lidar2nc, set $MET_HDF to point to the main HDF4 directory, or set $MET_HDFINC to point to the directory with the HDF4 include files and set $MET_HDFLIB to point to the directory with the HDF4 library files. Also, set $MET_HDFEOS to point to the main HDF EOS directory, or set $MET_HDFEOSINC to point to the directory with the HDF EOS include files and set $MET_HDFEOSLIB to point to the directory with the HDF EOS library files. From 4e4cc428ca7f23b802a107f92207a48897c74b8f Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 09:36:55 -0600 Subject: [PATCH 34/81] Updates to reflect new directory structure. --- docs/Users_Guide/appendixF.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 877ac360da..cd95ff4324 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -215,7 +215,7 @@ Listed below is an example of running the Plot-Data-Plane tool to call a Python .. code-block:: none plot_data_plane PYTHON_NUMPY fcst.ps \ - 'name="scripts/python/read_ascii_numpy.py data/python/fcst.txt FCST";' \ + 'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \ -title "Python enabled plot_data_plane" The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling a NumPy Python script, set this to the constant string PYTHON_NUMPY. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. 
When calling a Python script, set **name** to the Python script to be run along with command line arguments. Lastly, the **-title** option is used to add a title to the plot. Note that any print statements included in the Python script will be printed to the screen. The above example results in the following log messages. @@ -245,7 +245,7 @@ The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists .. code-block:: none plot_data_plane data/python/fcst.txt fcst.ps \ - 'name="scripts/python/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; \ + 'name="scripts/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; \ file_type=PYTHON_NUMPY;' \ -title "Python enabled plot_data_plane" @@ -287,4 +287,4 @@ The **read_ascii_mpr.py** sample script can be found in: • MET installation directory in *MET_BASE/python/examples*. -• `MET GitHub repository `_ in *met/scripts/python/examples*. +• `MET GitHub repository `_ in *MET/scripts/python/examples*. From 95f4964ce1dd46b3aa0c9d74a64e1d961514c90a Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 10:28:29 -0600 Subject: [PATCH 35/81] Changes to the section about building MET with Python Embedding support. --- docs/Users_Guide/appendixF.rst | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index cd95ff4324..c1439fcd02 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -7,20 +7,26 @@ Appendix F Python Embedding Introduction ============ -MET includes the ability to embed Python to a limited degree. Users may use Python scripts and whatever associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+. +MET includes the ability to embed Python to a limited degree. Users may use their own Python scripts and whatever associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+. .. _compiling_python_support: -Compiling Python Support -======================== +Compiling MET for Python Embedding +================================== -In order to use Python embedding, the user's local Python installation must have the C-language Python header files and libraries. Sometimes when Python is installed locally, these header files and libraries are deleted at the end of the installation process, leaving only the binary executable and run-time shared object files. But the Python header files and libraries must be present to compile support in MET for Python embedding. 
Assuming the requisite Python files are present, and that Python embedding is enabled when building MET (which is done by passing the **--enable-python** option to the **configure** command line), the MET C++ code will use these in the compilation process to link directly to the Python libraries. +In order to use Python embedding, a local Python installation must be available when compiling the MET software with the following requirements: -The local Python installation must also support a minimum set of required packages. The MET build includes some python wrapper scripts to facilitate the passing of data in memory as well as the reading and writing of temporary files. The packages required by those wrapper scripts are **sys, os, argparse, importlib, numpy and netCDF4**. While most of these are standard packages and readily available, numpy and netCDF4 may not be. Users are advised to confirm their availability prior to compiling MET with python embedding support. +1. C-language Python header files and libraries -In addition to the **configure** option mentioned above, three variables, **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**, must also be set for the configuration process. These may either be set as environment variables or as command line options to **configure**. These constants are passed as compiler command line options when building MET to enable the compiler to find the requisite Python executable, header files, and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. Along with the **Python** executable, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: +2. **NumPy** Python package + +3. **netCDF4** Python package + +Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** option to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files when Python embedding is used. + +In addition to using **\-\-enable-python** with **configure** as mentioned above, the following environment variables must also be set prior to executing **configure**: **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**. These may either be set as environment variables or as command line options to **configure**. These environment variables are used when building MET to enable the compiler to find the requisite Python executable, header files, and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process.
Along with the **Python** executable in the user's local Python installation, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: -• Set **MET_PYTHON_BIN_EXE** to the full path of the desired python executable. +• Set **MET_PYTHON_BIN_EXE** to the full path of the desired Python executable. • On the command line, run "**python3-config --cflags**". Set the value of **MET_PYTHON_CC** to the output of that command. From 7e18acfcc4e91457c3b274d3e157979faf92adf4 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 11:02:27 -0600 Subject: [PATCH 36/81] Changes to the section about MET_PYTHON_EXE. --- docs/Users_Guide/appendixF.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index c1439fcd02..cefd259093 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -35,26 +35,26 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. -MET_PYTHON_EXE -============== +Controlling Which Python MET uses with MET_PYTHON_EXE +===================================================== -When Python embedding support is compiled, MET instantiates the Python interpreter directly. However, for users of highly configurable Conda environments, the Python instance set at compilation time may not be sufficient. Users may want to switch between Conda environments for which different packages are available. MET version 9.0 has been enhanced to address this need. +When MET is compiled with Python embedding support, MET uses the Python executable in that Python installation by default when Python embedding is used. However, for users of highly configurable Python environments, the Python instance set at compilation time may not be sufficient. Users may want to use an alternate Python installation if they need additional packages not available in the Python installation used when compiling MET. In MET versions 9.0+, users have the ability to use a different Python executable when running MET than the version used when compiling MET by setting the environment variable **MET_PYTHON_EXE**. -The types of Python embedding supported in MET are described below. In all cases, by default, the compiled Python instance is used to execute the Python script. If the packages that script imports are not available for the compiled Python instance, users will encounter a runtime error. In the event of a runtime error, users are advised to set the **MET_PYTHON_EXE** environment variable and rerun. This environment variable should be set to the full path to the version of Python you would like to use. See an example below. +If a user's Python script requires packages that are not available in the Python installation used when compiling the MET software, they will encounter a runtime error when using MET. In this instance, the user will need to change the Python MET is using to a different installation with the required packages for their script. It is the responsibility of the user to manage this Python installation, and one popular approach is to use a custom Anaconda (Conda) Python environment. Once the Python installation meeting the user's requirements is available, the user can force MET to use it by setting the **MET_PYTHON_EXE** environment variable to the full path of the Python executable in that installation. For example: ..
code-block:: none export MET_PYTHON_EXE=/usr/local/python3/bin/python3 -Setting this environment variable triggers slightly different processing logic in MET. Rather than executing the user-specified script with compiled Python instance directly, MET does the following: +Setting this environment variable triggers slightly different processing logic in MET than when MET uses the Python installation that was used when compiling MET. When using the Python installation that was used when compiling MET, Python is called directly and data are passed in memory from Python to the MET tools. When the user sets **MET_PYTHON_EXE**, MET does the following: 1. Wrap the user's Python script and arguments with a wrapper script (write_tmp_mpr.py, write_tmp_point.py, or write_tmp_dataplane.py) and specify the name of a temporary file to be written. 2. Use a system call to the **MET_PYTHON_EXE** Python instance to execute these commands and write the resulting data objects to a temporary ASCII or NetCDF file. -3. Use the compiled Python instance to run a wrapper script (read_tmp_ascii.py or read_tmp_dataplane.py) to read data from that temporary file. +3. Use the Python instance that MET was compiled with to run a wrapper script (read_tmp_ascii.py or read_tmp_dataplane.py) to read data from that temporary file. -With this approach, users should be able to execute Python scripts in their own custom environments. +With this approach, users are able to execute Python scripts using their own custom Python installations. .. _pyembed-2d-data: From 3a0a09515e9d82e56bc7426a6af95f8cd373f14a Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 11:19:31 -0600 Subject: [PATCH 37/81] Adds overview of supported data structures. --- docs/Users_Guide/appendixF.rst | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index cefd259093..29c9e19581 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -56,10 +56,25 @@ Setting this environment variable triggers slightly different processing logic i With this approach, users are able to execute Python scripts using their own custom Python installations. +.. _pyembed-data-structures: + +Data Structures Supported by Python Embedding +============================================= + +Python embedding with MET tools offers support for three different types of data structures: + +1. Two-dimensional (2D) gridded dataplanes + +2. Point data conforming to the :ref:`MET 11-column format<_table_reformat-point_ascii2nc_format>` + +3. Matched-pair data conforming to the :ref:`MET MPR Line Type<_table_PS_format_info_MPR>` + +Details for each of these data structures are provided below. + .. _pyembed-2d-data: -Python Embedding for 2D data -============================ +Python Embedding for 2D gridded dataplanes +------------------------------------------ We now describe how to write Python scripts so that the MET tools may extract 2D gridded data fields from them. Currently, MET offers two ways to interact with Python scripts: by using NumPy N-dimensional arrays (ndarrays) or by using Xarray DataArrays. The interface to be used (NumPy or Xarray) is specified on the command line (more on this later). 
The user's scripts can use any Python libraries that are supported by the local Python installation, or any personal or institutional libraries or code that are desired in order to implement the Python script, so long as the data has been loaded into either a NumPy ndarray or an Xarray DataArray by the end of the script. This offers advantages when using data file formats that MET does not directly support. If there is Python code to read the data format, the user can use those tools to read the data, and then copy the data into a NumPy ndarray or an Xarray DataArray. MET can then ingest the data via the Python script. Note that whether a NumPy ndarray or an Xarray DataArray is used, the data should be stored as double precision floating point numbers. Using different data types, such as integers or single precision floating point numbers, will lead to unexpected results in MET. From d2549889fe485d1638fc36ba4cec9b7ed8e3467e Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 11:22:27 -0600 Subject: [PATCH 38/81] Fixes sub-section headers. --- docs/Users_Guide/appendixF.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 29c9e19581..96b8e6402b 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -273,7 +273,7 @@ The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists .. _pyembed-point-obs-data: Python Embedding for Point Observations -======================================= +--------------------------------------- The ASCII2NC tool supports the "-format python" option. With this option, point observations may be passed as input. An example of this is shown below: @@ -300,7 +300,7 @@ Both of the above examples use the **read_ascii_point.py** sample script which i .. _pyembed-mpr-data: Python Embedding for MPR data -============================= +----------------------------- The Stat-Analysis tool supports the "-lookin python" option. With this option, matched pair (MPR) data may be passed as input. An example of this is provided in :numref:`StA-pyembed`. That example uses the **read_ascii_mpr.py** sample script which is included with the MET code. It reads MPR data and stores it in a Pandas dataframe to be read by the Stat-Analysis tool with Python. From 55689242c2cd9bbe57641d9ac03a0a08fad5f029 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 13:56:26 -0600 Subject: [PATCH 39/81] Adds note about sample commands. --- docs/Users_Guide/appendixF.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 96b8e6402b..182f7c4c57 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -22,6 +22,10 @@ In order to use Python embedding, a local Python installation must be available 3. **netCDF4** Python package +4. **Pandas** Python package + +5. **Xarray** Python package + Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** option to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries.
The **NumPy** and **netCDF4** Python packages are required by Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files when Python embedding is used. In addition to using **\-\-enable-python** with **configure** as mentioned above, the following environment variables must also be set prior to executing **configure**: **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**. @@ -71,6 +75,9 @@ Python embedding with MET tools offers support for three different types of data Details for each of these data structures are provided below. +.. note:: +All sample commands listed below were generated at the top level of the MET source code directory. + .. _pyembed-2d-data: From 113db08a5aa32c0a2cd2af8684b07abcb5892734 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 14:00:44 -0600 Subject: [PATCH 40/81] Fixes table references and also adds note about assumed path location and corrects some paths that used MET_BASE. --- docs/Users_Guide/appendixF.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 182f7c4c57..1e8f06f7d0 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -69,14 +69,14 @@ Python embedding with MET tools offers support for three different types of data 1. Two-dimensional (2D) gridded dataplanes -2. Point data conforming to the :ref:`MET 11-column format<_table_reformat-point_ascii2nc_format>` +2. Point data conforming to the :ref:`MET 11-column format <table_reformat-point_ascii2nc_format>` -3. Matched-pair data conforming to the :ref:`MET MPR Line Type<_table_PS_format_info_MPR>` +3. Matched-pair data conforming to the :ref:`MET MPR Line Type <table_PS_format_info_MPR>` Details for each of these data structures are provided below. .. note:: -All sample commands listed below were generated at the top level of the MET source code directory. +All sample commands and directories listed below are relative to the top level of the MET source code directory. @@ -287,7 +287,7 @@ The ASCII2NC tool supports the "-format python" option. With this option, point .. code-block:: none ascii2nc -format python \ - "MET_BASE/python/examples/read_ascii_point.py sample_ascii_obs.txt" \ + "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ sample_ascii_obs_python.nc The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process point observations. They support Python embedding of point observations directly on the command line by replacing the input MET NetCDF point observation file name with the Python command to be run. The Python command must begin with the prefix 'PYTHON_NUMPY=' and be followed by the path to the User's Python script and any arguments.
The full command should be enclosed in single quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below: @@ -295,14 +295,14 @@ The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process .. code-block:: none plot_point_obs \ - "PYTHON_NUMPY=MET_BASE/python/examples/read_ascii_point.py sample_ascii_obs.txt" \ + "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ output_image.ps Both of the above examples use the **read_ascii_point.py** sample script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** sample script can be found in: -• MET installation directory in *MET_BASE/python/examples*. +• MET installation directory in *scripts/python/examples*. -• `MET GitHub repository `_ in *met/scripts/python/examples*. +• `MET GitHub repository `_ in *scripts/python/examples*. .. _pyembed-mpr-data: @@ -313,6 +313,6 @@ The Stat-Analysis tool supports the "-lookin python" option. With this option, m The **read_ascii_mpr.py** sample script can be found in: -• MET installation directory in *MET_BASE/python/examples*. +• MET installation directory in *scripts/python/examples*. • `MET GitHub repository `_ in *MET/scripts/python/examples*. From e1e298ac2c04079f73fe9e15e1fbf72ca05ff30a Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 14:59:05 -0600 Subject: [PATCH 41/81] Big overhaul to the beginning of the 2D dataplane section. --- docs/Users_Guide/appendixF.rst | 150 +++++++++++++++++++++------------ 1 file changed, 95 insertions(+), 55 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 1e8f06f7d0..d6f77f8e09 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -80,55 +80,69 @@ All sample commands and directories listed below are relative to the top level o .. _pyembed-2d-data: -Python Embedding for 2D gridded dataplanes +Python Embedding for 2D Gridded Dataplanes ------------------------------------------ -We now describe how to write Python scripts so that the MET tools may extract 2D gridded data fields from them. Currently, MET offers two ways to interact with Python scripts: by using NumPy N-dimensional arrays (ndarrays) or by using Xarray DataArrays. The interface to be used (NumPy or Xarray) is specified on the command line (more on this later). The user's scripts can use any Python libraries that are supported by the local Python installation, or any personal or institutional libraries or code that are desired in order to implement the Python script, so long as the data has been loaded into either a NumPy ndarray or an Xarray DataArray by the end of the script. This offers advantages when using data file formats that MET does not directly support. If there is Python code to read the data format, the user can use those tools to read the data, and then copy the data into a NumPy ndarray or an Xarray DataArray. MET can then ingest the data via the Python script. Note that whether a NumPy ndarray or an Xarray DataArray is used, the data should be stored as double precision floating point numbers. Using different data types, such as integers or single precision floating point numbers, will lead to unexpected results in MET. 
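For example, the double precision requirement described above can be satisfied with a single explicit NumPy cast. In the sketch below, **raw_2d** is a hypothetical 2D array returned by whatever reader the user's script employs; only the cast to float64 is the point being illustrated.

.. code-block:: none

    import numpy as np

    # 'raw_2d' stands in for data read elsewhere in the script; readers often
    # return int16 or float32 values, which MET does not expect.
    met_data = np.asarray(raw_2d, dtype=np.float64)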
+Currently, MET supports two different types of Python objects for two-dimensional gridded dataplanes: NumPy N-dimensional arrays (ndarrays) and Xarray DataArrays. The keyword **PYTHON_NUMPY** is used on the command line when using ndarrays, and **PYTHON_XARRAY** when using Xarray DataArrays. Example commands are included below. General requirements for Python embedding with two-dimensional gridded dataplanes are as follows: + +1. The data must be stored in a variable with the name **met_data** + +2. The **met_data** variable must be of type **Xarray DataArray** or **NumPy N-D Array** + +3. The data inside the **met_data** variable must be **double precision floating point** type + +4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes<_pyembed-2d-attrs>` + +.. _pyembed-2d_attrs: + +Required Attributes for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The **attrs** dictionary must contain the following information: + +.. list-table:: attrs + :widths: 10 10 10 + :header-rows: 1 + + * - key + - description + - data type + * - valid + - init + - lead + - accum + - name + - long_name + - level + - units + - grid + * - valid time + - initialization time + - forecast lead + - accumulation interval + - variable name + - variable long name + - vertical level + - variable units + - grid information + * - string (YYYYMMDD_HHMMSS) + - string (YYYYMMDD_HHMMSS) + - string (HHMMSS) + - string (HHMMSS) + - string + - string + - string + - string + - string + - string or dict + +.. note:: + Often times Xarray DataArray objects come with their own set of attributes available as a property. To avoid conflict with the required attributes + for MET, it is advised to strip these attributes and rely on the **attrs** dictionary defined in your script.
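A minimal sketch of a user script that satisfies the four numbered requirements above follows. The input file name **my_fcst.txt** and the use of **np.loadtxt** are illustrative assumptions; only the variable names **met_data** and **attrs** are required by MET, and the **attrs** values here mirror the example dictionary shown later in this section.

.. code-block:: none

    import numpy as np

    # Requirements 1-3: a NumPy N-D array named met_data holding
    # double precision floating point values.
    met_data = np.loadtxt('my_fcst.txt').astype(np.float64)

    # Requirement 4: the attrs dictionary of required metadata.
    # 'G212' is a named grid; grid may instead be a dictionary,
    # as described below.
    attrs = {
       'valid': '20050807_120000',
       'init':  '20050807_000000',
       'lead':  '120000',
       'accum': '120000',
       'name':  'Foo',
       'long_name': 'FooBar',
       'level': 'Surface',
       'units': 'None',
       'grid':  'G212',
    }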
-If specified as a string, **grid** can be defined as follows: +The grid entry in the **attrs** dictionary must contain the grid size and projection information in the same format that is used in the netCDF files written out by the MET tools. The value of this item in the dictionary can either be a string, or another dictionary. Examples of the **grid** entry defined as a string are: -• As a named grid: +• Using a named grid supported by MET: .. code-block:: none @@ -146,7 +160,7 @@ If specified as a string, **grid** can be defined as follows: 'grid': '/path/to/sample_data.grib' -When specified as a dictionary, the contents of the **grid** dictionary vary based on the grid **type** string. The entries for the supported grid types are described below: +When specified as a dictionary, the contents of the **grid** entry vary based upon the grid **type**. The required elements for supported grid types are: • **Lambert Conformal** grid dictionary entries: @@ -216,19 +230,45 @@ When specified as a dictionary, the contents of the **grid** dictionary vary bas Additional information about supported grids can be found in :ref:`appendixB`. -**Using Xarray DataArrays** - -To use Xarray DataArrays, a similar procedure to the NumPy case is followed. The Xarray DataArray can be represented as a NumPy N-dimensional array (ndarray) via the **values** property of the DataArray, and an **attrs** property that contains a dictionary of attributes. The user must name the Xarray DataArray to be **met_data**. When one of the MET tools runs the Python script, it will look for an Xarray DataArray named **met_data**, and will retrieve the data and metadata from the **values** and **attrs** properties, respectively, of the Xarray DataArray. The Xarray DataArray **attrs** dictionary is populated in the same way as for the NumPy interface (please see :ref:`pyembed-2d-data` for requirements of each entry in the **attrs** dictionary). The **values** NumPy ndarray property of the Xarray DataArray is also populated in the same way as the NumPy case. +Finally, an example **attrs** dictionary is shown below: -.. note:: - Currently, MET does not support Xarray Dataset structures. If you have a Dataset in Xarray, you can create a DataArray of a single variable using: - - met_data = xr.DataArray(ds.varname,attrs=ds.attrs) +.. code-block:: none - | ds = your Dataset name - | varname = variable name in the Dataset you'd like to use in MET + attrs = { + + 'valid': '20050807_120000', + 'init': '20050807_000000', + 'lead': '120000', + 'accum': '120000', + + 'name': 'Foo', + 'long_name': 'FooBar', + 'level': 'Surface', + 'units': 'None', + + # Define 'grid' as a string or a dictionary + + 'grid': { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + 'name': 'FooGrid', + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + 'lat_pin': 12.19, + 'lon_pin': -135.459, + 'x_pin': 0.0, + 'y_pin': 0.0, + 'lon_orient': -95.0, + 'd_km': 40.635, + 'r_km': 6371.2, + 'nx': 185, + 'ny': 129, + } + + } -__________________ +Example Commands for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It remains to discuss command lines and config files. Two methods for specifying the Python command and input file name are supported. From dc74182154ade644a91a5510410b390f0b028692 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 15:06:46 -0600 Subject: [PATCH 42/81] Fixes table problems. 
--- docs/Users_Guide/appendixF.rst | 41 +++++++++++++++++----------------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index d6f77f8e09..0e9695dc6a 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -108,32 +108,31 @@ The **attrs** dictionary must contain the following information: - description - data type * - valid - - init - - lead - - accum - - name - - long_name - - level - - units - - grid - * - valid time + - valid time + - string (YYYYMMDD_HHMMSS) + * - init - initialization time - - forecast lead - - accumulation interval - - variable name - - variable long name - - vertical level - - variable units - - grid informatiomn - * - string (YYYYMMDD_HHMMSS) - string (YYYYMMDD_HHMMSS) + * - lead + - forecast lead - string (HHMMSS) + * - accum + - accumulation interval - string (HHMMSS) + * - name + - variable name - string + * - long_name + - variable long name - string + * - level + - variable level - string + * - units + - variable units - string - - string + * - grid + - grid information - string or dict .. note:: @@ -267,10 +266,10 @@ Finally, an example **attrs** dictionary is shown below: } -Example Commands for 2D Gridded Dataplanes -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Running Python Embedding for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -It remains to discuss command lines and config files. Two methods for specifying the Python command and input file name are supported. +Two methods for specifying the Python command and input file name are supported. **Python Embedding Option 1:** From 5aeeea980cdf343ca9ae63b82eb741b73e1b56c8 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 15:21:21 -0600 Subject: [PATCH 43/81] Updates the rest of the gridded data section. --- docs/Users_Guide/appendixF.rst | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 0e9695dc6a..acc1a9a923 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -38,7 +38,6 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. - Controlling Which Python MET uses with MET_PYTHON_EXE ===================================================== @@ -76,7 +75,8 @@ Python embedding with MET tools offers support for three different types of data Details for each of these data structures are provided below. .. note:: -All sample commands and directories listed below are relative to the top level of the MET source code directory. + + All sample commands and directories listed below are relative to the top level of the MET source code directory. .. _pyembed-2d-data: @@ -136,6 +136,7 @@ The **attrs** dictionary must contain the following information: - string or dict .. note:: + Often times Xarray DataArray objects come with their own set of attributes available as a property. To avoid conflict with the required attributes for MET, it is advised to strip these attributes and rely on the **attrs** dictionary defined in your script. 
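One hedged illustration of the note above: assuming a hypothetical Dataset **ds** opened elsewhere in the script, and with **attrs** being the required dictionary defined as shown earlier, the attributes inherited from the source file can simply be replaced.

.. code-block:: none

    import numpy as np
    import xarray as xr

    # Hypothetical source; in practice ds would come from xr.open_dataset().
    ds = xr.Dataset({'varname': (('y', 'x'), np.zeros((129, 185)))})

    # Cast to double precision, as MET requires.
    met_data = ds['varname'].astype('float64')

    # Discard attributes inherited from the source and attach only the
    # MET-required 'attrs' dictionary defined in the user's script.
    met_data.attrs = attrs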
@@ -269,15 +270,9 @@ Finally, an example **attrs** dictionary is shown below: } Running Python Embedding for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Two methods for specifying the Python command and input file name are supported. - -**Python Embedding Option 1:** +On the command line for any of the MET tools which will be obtaining their data from a Python script rather than directly from a data file, the user should specify either **PYTHON_NUMPY** or **PYTHON_XARRAY** wherever a (forecast or observation) data file name would normally be given. Then in the **name** entry of the config file dictionaries for the forecast or observation data, the user should list the **full path** to the Python script to be run followed by any command line arguments for that script. Note that for tools like MODE that take two data files, it is entirely possible to use the **PYTHON_NUMPY** for one file and the **PYTHON_XARRAY** for the other. -On the command line for any of the MET tools which will be obtaining its data from a Python script rather than directly from a data file, the user should specify either PYTHON_NUMPY or PYTHON_XARRAY wherever a (forecast or observation) data file name would normally be given. Then in the **name** entry of the config file dictionaries for the forecast or observation data, the user should list the Python script to be run followed by any command line arguments for that script. Note that for tools like MODE that take two data files, it would be entirely possible to use the NumPy interface for one file and the Xarray interface for the other. - -___________________ - -Listed below is an example of running the Plot-Data-Plane tool to call a Python script for data that is included with the MET release tarball. Assuming the MET executables are in your path, this example may be run from the top-level MET source code directory. +Listed below is an example of running the Plot-Data-Plane tool to call a Python script for data that is included with the MET release tarball. Assuming the MET executables are in your path, this example may be run from the top-level MET source code directory: .. code-block:: none plot_data_plane PYTHON_NUMPY fcst.ps \ 'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \ -title "Python enabled plot_data_plane" -The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling a NumPy Python script, set this to the constant string PYTHON_NUMPY. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. +The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling a Python script that has a two-dimensional gridded dataplane stored in a NumPy N-D array object, set this to the constant string PYTHON_NUMPY. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted.
When calling a Python script, set **name** to the full path of the Python script to be run along with any command line arguments for that script. Lastly, the **-title** option is used to add a title to the plot. Note that any print statements included in the Python script will be printed to the screen. The above example results in the following log messages: .. code-block:: none @@ -301,13 +296,11 @@ The first argument for the Plot-Data-Plane tool is the gridded data file to be r 'grid': {...} } DEBUG 1: Creating postscript file: fcst.ps -**Python Embedding Option 2 using MET_PYTHON_INPUT_ARG:** - -The second option was added to support the use of Python embedding in tools which read multiple input files. Option 1 reads a single field of data from a single source, whereas tools like Ensemble-Stat, Series-Analysis, and MTD read data from multiple input files. While option 2 can be used in any of the MET tools, it is required for Python embedding in Ensemble-Stat, Series-Analysis, and MTD. +**Special Case for Ensemble-Stat, Series-Analysis, and MTD** -On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in PYTHON_NUMPY or PYTHON_XARRAY on the command line. In the config file dictionary set the **file_type** entry to either PYTHON_NUMPY or PYTHON_XARRAY to activate the Python embedding logic. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string MET_PYTHON_INPUT_ARG. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the option 2 logic instead. +Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a different approach to using Python embedding was required. This approach can be used in any of the MET tools, but it is required when using Python embedding with Ensemble-Stat, Series-Analysis, and MTD. The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set MET_PYTHON_INPUT_ARG to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. -The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. 
For Python embedding, these tools loop over the ASCII file list entries, set MET_PYTHON_INPUT_ARG to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. +On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in PYTHON_NUMPY or PYTHON_XARRAY on the command line for this case. Instead, in the config file dictionary set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate Python embedding in MET. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the full path to the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string **MET_PYTHON_INPUT_ARG**. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the approach for this special case: .. code-block:: none From 43319b65fe653261d404b1864603771c30a49cf5 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 15:30:11 -0600 Subject: [PATCH 44/81] Fixes warning and tweaks table. --- docs/Users_Guide/appendixF.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index acc1a9a923..56e48327dd 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -93,7 +93,7 @@ Currently, MET supports two different types of Python objects for two-dimensiona 4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes<_pyembed-2d-attrs>` -.. _pyembed-2d_attrs: +.. _pyembed-2d-attrs: Required Attributes for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -101,12 +101,12 @@ Required Attributes for 2D Gridded Dataplanes The **attrs** dictionary must contain the following information: .. list-table:: attrs - :widths: 10 10 10 + :widths: 5 5 10 :header-rows: 1 * - key - description - - data type + - data type/format * - valid - valid time - string (YYYYMMDD_HHMMSS) @@ -298,9 +298,9 @@ The first argument for the Plot-Data-Plane tool is the gridded data file to be r **Special Case for Ensemble-Stat, Series-Analysis, and MTD** -Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a different approach to using Python embedding was required. This approach can be used in any of the MET tools, but it is required when using Python embedding with Ensemble-Stat, Series-Analysis, and MTD. The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. 
Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set MET_PYTHON_INPUT_ARG to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. +Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a different approach to using Python embedding is required. This approach can be used in any of the MET tools, but it is required when using Python embedding with Ensemble-Stat, Series-Analysis, and MTD. The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set MET_PYTHON_INPUT_ARG to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. -On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in PYTHON_NUMPY or PYTHON_XARRAY on the command line for this case. Instead, in the config file dictionary set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate Python embedding in MET. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the full path to the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string **MET_PYTHON_INPUT_ARG**. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the approach for this special case: +On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in **PYTHON_NUMPY** or **PYTHON_XARRAY** on the command line for this case. Instead, in the config file dictionary set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate Python embedding in MET. 
Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the full path to the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string **MET_PYTHON_INPUT_ARG**. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the approach for this special case:

.. code-block:: none


From 549aa68ba5c3f7d26bb9080cb2832d081f610496 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Tue, 11 Apr 2023 15:44:32 -0600
Subject: [PATCH 45/81] Adds required Python version and attempts to fix RST link warning.

---
 docs/Users_Guide/appendixF.rst | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst
index 56e48327dd..d7ee595574 100644
--- a/docs/Users_Guide/appendixF.rst
+++ b/docs/Users_Guide/appendixF.rst
@@ -16,15 +16,17 @@ Compiling MET for Python Embedding
 
 In order to use Python embedding, a local Python installation must be available when compiling the MET software with the following requirements:
 
-1. C-language Python header files and libraries
+1. Python version 3.8.6+
 
-2. **NumPy** Python package
+2. C-language Python header files and libraries
 
-3. **netCDF4** Python package
+3. **NumPy** Python package
 
-4. **Pandas** Python package
+4. **netCDF4** Python package
 
-5. **Xarray** Python package
+5. **Pandas** Python package
+
+6. **Xarray** Python package
 
 Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** option to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files when Python embedding is used.
@@ -91,7 +93,7 @@ Currently, MET supports two different types of Python objects for two-dimensiona
 
 3. The data inside the **met_data** variable must be **double precision floating point** type
 
-4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes<_pyembed-2d-attrs>`
+4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes <pyembed-2d-attrs>`
 
 .. _pyembed-2d-attrs:
 

From 38952d642d74762fa13cb2d017862e9733f8c922 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Tue, 11 Apr 2023 16:04:05 -0600
Subject: [PATCH 46/81] Adds placeholder for examples of pyembed with 2D gridded dataplanes.
--- docs/Users_Guide/appendixF.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index d7ee595574..e40fd5a8b3 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -34,9 +34,9 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above • Set **MET_PYTHON_BIN_EXE** to the full path of the desired Python executable. -• On the command line, run "**python3-config --cflags**". Set the value of **MET_PYTHON_CC** to the output of that command. +• On the command line, run "**python3-config \-\-cflags**". Set the value of **MET_PYTHON_CC** to the output of that command. -• Again on the command line, run "**python3-config --ldflags**". Set the value of **MET_PYTHON_LD** to the output of that command. +• Again on the command line, run "**python3-config \-\-ldflags**". Set the value of **MET_PYTHON_LD** to the output of that command. Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. @@ -311,6 +311,9 @@ On the command line for any of the MET tools, specify the path to the input grid file_type=PYTHON_NUMPY;' \ -title "Python enabled plot_data_plane" +Examples of Python Embedding for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + .. _pyembed-point-obs-data: Python Embedding for Point Observations From 489b6e83d6fd1152bac8618a8d09067dcd4457f3 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Tue, 11 Apr 2023 16:11:18 -0600 Subject: [PATCH 47/81] Adds a better table name for the 2D attribute table. --- docs/Users_Guide/appendixF.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index e40fd5a8b3..fbb44a53a8 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -102,7 +102,7 @@ Required Attributes for 2D Gridded Dataplanes The **attrs** dictionary must contain the following information: -.. list-table:: attrs +.. list-table:: 2D Dataplane Attributes :widths: 5 5 10 :header-rows: 1 From 993ec8c2ef30996c6bfb60972e2eddd3f95b5688 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 13 Apr 2023 08:54:42 -0600 Subject: [PATCH 48/81] #2285 Changed argument usecols to col_start and col_last for read_mpr --- scripts/python/met/mprbase.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/scripts/python/met/mprbase.py b/scripts/python/met/mprbase.py index c50ab89a29..57e5769015 100644 --- a/scripts/python/met/mprbase.py +++ b/scripts/python/met/mprbase.py @@ -4,15 +4,18 @@ class mpr_data(): - def read_mpr(input_file, usecols=range(1,37), header=None, - delim_whitespace=True, keep_default_na=False, - skiprows=1, dtype=str): - mpr_data = pd.read_csv(input_file, header=header, - delim_whitespace=delim_whitespace, - keep_default_na=keep_default_na, - skiprows=skiprows, usecols=usecols, - dtype=dtype).values.tolist() - return mpr_data + # Read a text file with N columns and returns the list of N column data + # Skip first "col_start" columns if col_start is not 0. 
+   def read_mpr(input_file, col_last, col_start = 0, header=None,
+                delim_whitespace=True, keep_default_na=False,
+                skiprows=1, dtype=str):
+      mpr_data = pd.read_csv(input_file, header=header,
+                             delim_whitespace=delim_whitespace,
+                             keep_default_na=keep_default_na,
+                             skiprows=skiprows,
+                             usecols=range(col_start,col_last+1),
+                             dtype=dtype).values.tolist()
+      return mpr_data
 
 ########################################################################

From 9fae26f3954ab075526f9d77e17cb622b6b5180b Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Thu, 13 Apr 2023 08:57:07 -0600
Subject: [PATCH 49/81] #2285 Changed argument usecols to col_start and col_last for read_mpr

---
 scripts/python/examples/read_ascii_mpr.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/scripts/python/examples/read_ascii_mpr.py b/scripts/python/examples/read_ascii_mpr.py
index c8a68912fc..cd964434fc 100644
--- a/scripts/python/examples/read_ascii_mpr.py
+++ b/scripts/python/examples/read_ascii_mpr.py
@@ -13,19 +13,20 @@
 ##
 
 if len(sys.argv) != 2:
-    print("ERROR: read_ascii_point.py -> Must specify exactly one input file.")
-    sys.exit(1)
+   print("ERROR: read_ascii_mpr.py -> Must specify exactly one input file.")
+   sys.exit(1)
 
 # Read the input file as the first argument
 input_file = os.path.expandvars(sys.argv[1])
 
 try:
-    print("Input File:\t" + repr(input_file))
+   print("Input File:\t" + repr(input_file))
 
-    # Read MPR lines, skipping the header row and first column.
-    mpr_data = mpr_data.read_mpr(input_file)
-    print("Data Length:\t" + repr(len(mpr_data)))
-    print("Data Type:\t" + repr(type(mpr_data)))
+   # Read MPR lines using the pandas package, skipping the header row and
+   # first column. Input should be 36-column text data.
+   mpr_data = mpr_data.read_mpr(input_file, col_start=1, col_last=36, skiprows=1)
+   print("Data Length:\t" + repr(len(mpr_data)))
+   print("Data Type:\t" + repr(type(mpr_data)))
 except NameError:
-    print("Can't find the input file")
+   print("Can't find the input file")
 
 ########################################################################

From 236d59cb54cf7316a2802b9b2714bb68ab543d2b Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Thu, 13 Apr 2023 09:00:04 -0600
Subject: [PATCH 50/81] #2285 Changed spaces for indent (4 to 3)

---
 scripts/python/examples/read_ascii_point.py   | 64 ++++++++++---------
 scripts/python/examples/read_met_point_obs.py | 32 ++++++----
 2 files changed, 51 insertions(+), 45 deletions(-)

diff --git a/scripts/python/examples/read_ascii_point.py b/scripts/python/examples/read_ascii_point.py
index f153e8bfd7..05f5edf340 100644
--- a/scripts/python/examples/read_ascii_point.py
+++ b/scripts/python/examples/read_ascii_point.py
@@ -14,47 +14,49 @@
 
 arg_cnt = len(sys.argv)
 if arg_cnt < 2:
-    print("ERROR: read_ascii_point.py -> Missing an input file.")
-    sys.exit(1)
+   print("ERROR: read_ascii_point.py -> Missing an input file.")
+   sys.exit(1)
 
 do_convert = False
 last_index = 2
 if arg_cnt > last_index:
-    opt_convert = sys.argv[2]
-    if opt_convert.lower() == "do_convert" or opt_convert.lower() == "convert":
-        do_convert = True
-        last_index += 1
+   opt_convert = sys.argv[2]
+   if opt_convert.lower() == "do_convert" or opt_convert.lower() == "convert":
+      do_convert = True
+      last_index += 1
 
 if last_index < arg_cnt:
-    print(" INFO: read_ascii_point.py -> Too many argument, ignored {o}.".format(
-          o=' '.join(sys.argv[last_index:])))
+   print(" INFO: read_ascii_point.py -> Too many arguments, ignored {o}.".format(
+         o=' '.join(sys.argv[last_index:])))
 
 # Read the input file 
as the first argument input_file = os.path.expandvars(sys.argv[1]) try: - print("Input File:\t" + repr(input_file)) - - # Read and format the input 11-column observations: - # (1) string: Message_Type - # (2) string: Station_ID - # (3) string: Valid_Time(YYYYMMDD_HHMMSS) - # (4) numeric: Lat(Deg North) - # (5) numeric: Lon(Deg East) - # (6) numeric: Elevation(msl) - # (7) string: Var_Name(or GRIB_Code) - # (8) numeric: Level - # (9) numeric: Height(msl or agl) - # (10) string: QC_String - # (11) numeric: Observation_Value - - point_data = tools.read_text_point_obs(input_file) - print(" point_data: Data Length:\t" + repr(len(point_data))) - print(" point_data: Data Type:\t" + repr(type(point_data))) - if do_convert: - met_point_data = tools.convert_point_data(point_data) - print(" met_point_data: Data Type:\t" + repr(type(met_point_data))) + print("Input File:\t" + repr(input_file)) + + # Read and format the input 11-column observations: + # (1) string: Message_Type + # (2) string: Station_ID + # (3) string: Valid_Time(YYYYMMDD_HHMMSS) + # (4) numeric: Lat(Deg North) + # (5) numeric: Lon(Deg East) + # (6) numeric: Elevation(msl) + # (7) string: Var_Name(or GRIB_Code) + # (8) numeric: Level + # (9) numeric: Height(msl or agl) + # (10) string: QC_String + # (11) numeric: Observation_Value + + # Read 11 column text input data by using pandas package + point_data = tools.read_text_point_obs(input_file) + print(" point_data: Data Length:\t" + repr(len(point_data))) + print(" point_data: Data Type:\t" + repr(type(point_data))) + if do_convert: + # Convert 11 column list to MET's internal python instance + met_point_data = tools.convert_point_data(point_data) + print(" met_point_data: Data Type:\t" + repr(type(met_point_data))) except FileNotFoundError: - print(f"The input file {input_file} does not exist") - sys.exit(1) + print(f"The input file {input_file} does not exist") + sys.exit(1) ######################################################################## diff --git a/scripts/python/examples/read_met_point_obs.py b/scripts/python/examples/read_met_point_obs.py index 8fbb5ef85c..b84248c34c 100644 --- a/scripts/python/examples/read_met_point_obs.py +++ b/scripts/python/examples/read_met_point_obs.py @@ -32,25 +32,29 @@ input_name = sys.argv[1] prompt = met_point_tools.get_prompt() if len(sys.argv) == 1 or ARG_PRINT_DATA == input_name: - point_obs_data = met_point_tools.get_sample_met_point_obs() - point_obs_data.read_data([]) + # This is an example of creating a sample data + point_obs_data = met_point_tools.get_sample_met_point_obs() + point_obs_data.read_data([]) elif met_point_tools.is_python_prefix(input_name): - point_obs_data = pyembed_tools.call_python(sys.argv) + # This is an example of calling a python script for ascii2nc + point_obs_data = pyembed_tools.call_python(sys.argv) else: - netcdf_filename = os.path.expandvars(input_name) - args = [ netcdf_filename ] - #args = { 'nc_name': netcdf_filename } - point_obs_data = met_point_tools.get_nc_point_obs() - point_obs_data.read_data(point_obs_data.get_nc_filename(args)) + # This is an example of reading MET's point observation NetCDF file + # from ascii2nc, madis2nc, and pb2nc + netcdf_filename = os.path.expandvars(input_name) + args = [ netcdf_filename ] + #args = { 'nc_name': netcdf_filename } + point_obs_data = met_point_tools.get_nc_point_obs() + point_obs_data.read_data(point_obs_data.get_nc_filename(args)) if point_obs_data is not None: - met_point_data = point_obs_data.get_point_data() - met_point_data['met_point_data'] = 
point_obs_data - print("met_point_data: ", met_point_data) - print(met_point_data) + met_point_data = point_obs_data.get_point_data() + met_point_data['met_point_data'] = point_obs_data + print("met_point_data: ", met_point_data) + print(met_point_data) - if DO_PRINT_DATA: - point_obs_data.dump() + if DO_PRINT_DATA: + point_obs_data.dump() run_time = datetime.now() - start_time From 632dcf1596eafa26945b7796eccb7dd2c59c2de1 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 13 Apr 2023 09:05:48 -0600 Subject: [PATCH 51/81] #2285 Renamed load_txt to read_2d_text_input. Removed get_grid_metadata and added create_header_attrs --- scripts/python/examples/read_ascii_numpy.py | 63 ++++- .../python/examples/read_ascii_numpy_grid.py | 45 +++- scripts/python/examples/read_ascii_xarray.py | 63 ++++- scripts/python/met/dataplane.py | 250 ++++++++---------- 4 files changed, 270 insertions(+), 151 deletions(-) diff --git a/scripts/python/examples/read_ascii_numpy.py b/scripts/python/examples/read_ascii_numpy.py index 974b2ce158..dad36f7800 100644 --- a/scripts/python/examples/read_ascii_numpy.py +++ b/scripts/python/examples/read_ascii_numpy.py @@ -19,8 +19,67 @@ input_file = os.path.expandvars(sys.argv[1]) data_name = sys.argv[2] -met_data = dataplane.load_txt(input_file, data_name) +try: + print("Input File:\t" + repr(input_file)) + print("Data Name:\t" + repr(data_name)) + # read_2d_text_input() reads n by m text data and returns 2D numpy array + met_data = dataplane.read_2d_text_input(input_file) + print("Data Shape:\t" + repr(met_data.shape)) + print("Data Type:\t" + repr(met_data.dtype)) +except NameError: + met_data = None + print("Can't find the input file") -attrs = dataplane.get_grid_metadata(data_name) +# attrs is a dictionary which contains header data for the dataplane. 
+# attrs should have 9 members with string type data:
+#   'name': data name
+#   'long_name': descriptive name
+#   'valid': valid time (format = 'yyyymmdd_hhmmss')
+#   'init': init time (format = 'yyyymmdd_hhmmss')
+#   'lead': lead time (format = 'hhmmss')
+#   'accum': accumulation time (format = 'hhmmss')
+#   'level': vertical level
+#   'units': units of the data
+#   'grid': contains the grid information
+#      - a grid name (G212)
+#      - a gridded data file name
+#      - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N"
+#      - a dictionary for the grid information
+
+valid_time = '20050807_120000'
+init_time = '20050807_000000'
+lead_time = '120000'
+accum_time = '120000'
+v_level = 'Surface'
+units = 'None'
+
+grid_lambert_conformal = {
+   'type': 'Lambert Conformal',
+   'hemisphere': 'N',
+
+   'name': 'FooGrid',
+
+   'scale_lat_1': 25.0,
+   'scale_lat_2': 25.0,
+
+   'lat_pin': 12.19,
+   'lon_pin': -135.459,
+
+   'x_pin': 0.0,
+   'y_pin': 0.0,
+
+   'lon_orient': -95.0,
+
+   'd_km': 40.635,
+   'r_km': 6371.2,
+
+   'nx': 185,
+   'ny': 129,
+}
+
+long_name = data_name + "_word"
+attrs = dataplane.create_header_attrs(data_name, valid_time, init_time,
+                                      lead_time, accum_time, v_level, units,
+                                      grid_lambert_conformal, long_name)
 
 print("Attributes:\t" + repr(attrs))
diff --git a/scripts/python/examples/read_ascii_numpy_grid.py b/scripts/python/examples/read_ascii_numpy_grid.py
index e1f5cb1a34..6e039b67b1 100644
--- a/scripts/python/examples/read_ascii_numpy_grid.py
+++ b/scripts/python/examples/read_ascii_numpy_grid.py
@@ -12,18 +12,53 @@
 ##
 
 if len(sys.argv) != 3:
-    print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.")
-    sys.exit(1)
+   print("ERROR: read_ascii_numpy_grid.py -> Must specify exactly one input file and a name for the data.")
+   sys.exit(1)
 
 # Read the input file as the first argument
 input_file = os.path.expandvars(sys.argv[1])
 data_name = sys.argv[2]
-met_data = dataplane.load_txt(input_file, data_name)
+
+try:
+   # Print some output to verify that this script ran
+   print("Input File:\t" + repr(input_file))
+   print("Data Name:\t" + repr(data_name))
+   # read_2d_text_input() reads n by m text data and returns 2D numpy array
+   met_data = dataplane.read_2d_text_input(input_file)
+   print("Data Shape:\t" + repr(met_data.shape))
+   print("Data Type:\t" + repr(met_data.dtype))
+except NameError:
+   print("Can't find the input file")
+
+# attrs is a dictionary which contains header data for the dataplane.
+# attrs should have 9 members with string type data:
+#   'name': data name
+#   'long_name': descriptive name
+#   'valid': valid time (format = 'yyyymmdd_hhmmss')
+#   'init': init time (format = 'yyyymmdd_hhmmss')
+#   'lead': lead time (format = 'hhmmss')
+#   'accum': accumulation time (format = 'hhmmss')
+#   'level': vertical level
+#   'units': units of the data
+#   'grid': contains the grid information
+#      - a grid name (G212)
+#      - a gridded data file name
+#      - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N"
+#      - a dictionary for the grid information
+
+valid_time = '20050807_120000'
+init_time = '20050807_000000'
+lead_time = '120000'
+accum_time = '120000'
+v_level = 'Surface'
+units = 'None'
 
 ## create the metadata dictionary from the environment variable,
-## Default env_name = 'PYTHON_GRID'
+grid_info = os.path.expandvars(os.getenv('PYTHON_GRID'))
 
-attrs = dataplane.get_grid_metadata_from_env(data_name)
+long_name = data_name + "_word"
+attrs = dataplane.create_header_attrs(data_name, valid_time, init_time,
+                                      lead_time, accum_time, v_level, units,
+                                      grid_info, long_name)
 
 print("Attributes:\t" + repr(attrs))
diff --git a/scripts/python/examples/read_ascii_xarray.py b/scripts/python/examples/read_ascii_xarray.py
index 91c8edb6ea..dbc4e4b33f 100644
--- a/scripts/python/examples/read_ascii_xarray.py
+++ b/scripts/python/examples/read_ascii_xarray.py
@@ -20,7 +20,16 @@
 input_file = os.path.expandvars(sys.argv[1])
 data_name = sys.argv[2]
 
-met_data = dataplane.load_txt(input_file, data_name)
+try:
+   print("Input File:\t" + repr(input_file))
+   print("Data Name:\t" + repr(data_name))
+   # read_2d_text_input() reads n by m text data and returns 2D numpy array
+   met_data = dataplane.read_2d_text_input(input_file)
+   print("Data Shape:\t" + repr(met_data.shape))
+   print("Data Type:\t" + repr(met_data.dtype))
+except NameError:
+   met_data = None
+   print("Can't read the input file")
 
 ###########################################
 
@@ -28,7 +37,57 @@
 ## create the metadata dictionary
 ##
 
-attrs = dataplane.get_grid_metadata(data_name)
+# attrs is a dictionary which contains header data for the dataplane.
+# attrs should have 9 members with string type data: +# 'name': data name +# 'long_name': descriptive name +# 'valid': valid time (format = 'yyyymmdd_hhmmss') +# 'init': init time (format = 'yyyymmdd_hhmmss') +# 'lead': lead time (format = 'hhmmss') +# 'accum': accumulation time (format = 'hhmmss') +# 'level': vertilcal level +# 'units': units of the data +# 'grid': contains the grid information +# - a grid name (G212) +# - a gridded data file name +# - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" +# - a dictionary for the grid information + +valid_time = '20050807_120000' +init_time = '20050807_000000' +lead_time = '120000' +accum_time = '120000' +v_level = 'Surface' +units = 'None' + +grid_lambert_conformal = { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + + 'name': 'FooGrid', + + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + + 'lat_pin': 12.19, + 'lon_pin': -135.459, + + 'x_pin': 0.0, + 'y_pin': 0.0, + + 'lon_orient': -95.0, + + 'd_km': 40.635, + 'r_km': 6371.2, + + 'nx': 185, + 'ny': 129, +} + +long_name = data_name + "_word" +attrs = dataplane.create_header_attrs(data_name, valid_time, init_time, + lead_time, accum_time, v_level, units, + grid_lambert_conformal) print("Attributes:\t" + repr(attrs)) diff --git a/scripts/python/met/dataplane.py b/scripts/python/met/dataplane.py index 841c000fe1..e454918d65 100644 --- a/scripts/python/met/dataplane.py +++ b/scripts/python/met/dataplane.py @@ -6,146 +6,112 @@ class dataplane(): - ## - ## create the metadata dictionary - ## - - #@staticmethod - def get_grid_metadata(data_name): - attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - - 'name': 'FooGrid', - - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - - 'lat_pin': 12.19, - 'lon_pin': -135.459, - - 'x_pin': 0.0, - 'y_pin': 0.0, - - 'lon_orient': -95.0, - - 'd_km': 40.635, - 'r_km': 6371.2, - - 'nx': 185, - 'ny': 129, - } - - } - return attrs - - ## - ## create the metadata dictionary from the environment variable PYTHON_GRID - ## - - #@staticmethod - def get_grid_metadata_from_env(data_name, grid_env_name='PYTHON_GRID'): - attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': data_name, - 'long_name': data_name + '_word', - 'level': 'Surface', - 'units': 'None', - 'grid': os.path.expandvars(os.getenv(grid_env_name)) - } - return attrs - - #@staticmethod - def load_txt(input_file, data_name): - try: - print("Input File:\t" + repr(input_file)) - print("Data Name:\t" + repr(data_name)) - met_data = np.loadtxt(input_file) - print("Data Shape:\t" + repr(met_data.shape)) - print("Data Type:\t" + repr(met_data.dtype)) - except NameError: - met_data = None - print("Can't find the input file") - return met_data - - #@staticmethod - def read_dataplane(netcdf_filename): - # read NetCDF file - ds = nc.Dataset(netcdf_filename, 'r') - met_data = ds['met_data'][:] - met_attrs = {} - - # grid is defined as a dictionary or string - grid = {} - for attr, attr_val in ds.__dict__.items(): - if 'grid.' 
in attr: - grid_attr = attr.split('.')[1] - grid[grid_attr] = attr_val - else: - met_attrs[attr] = attr_val - - if grid: - met_attrs['grid'] = grid - - met_attrs['name'] = met_attrs['name_str'] - del met_attrs['name_str'] - - met_info = {} - met_info['met_data'] = met_data - met_info['attrs'] = met_attrs - return met_info - - #@staticmethod - def write_dataplane(met_in, netcdf_filename): - met_info = {'met_data': met_in.met_data} - if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: - attrs = met_in.met_data.attrs - else: - attrs = met_in.attrs - met_info['attrs'] = attrs - - # determine fill value - try: - fill = met_in.met_data.get_fill_value() - except: - fill = -9999. - - # write NetCDF file - ds = nc.Dataset(netcdf_filename, 'w') - - # create dimensions and variable - nx, ny = met_in.met_data.shape - ds.createDimension('x', nx) - ds.createDimension('y', ny) - dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill) - dp[:] = met_in.met_data - - # append attributes - for attr, attr_val in met_info['attrs'].items(): - if attr == 'name': - setattr(ds, 'name_str', attr_val) - elif type(attr_val) == dict: - for key in attr_val: - setattr(ds, attr + '.' + key, attr_val[key]) - else: - setattr(ds, attr, attr_val) - - ds.close() + ## + ## create the metadata dictionary + ## + + #@staticmethod + # 'name': data name + # 'long_name': descriptive name + # 'valid': valid time (format = 'yyyymmdd_hhmmss') + # 'init': init time (format = 'yyyymmdd_hhmmss') + # 'lead': lead time (format = 'hhmmss') + # 'accum': accumulation time (format = 'hhmmss') + # 'level': vertilcal level + # 'units': units of the data + # 'grid': contains the grid information + # - a grid name (G212) + # - a gridded data file name + # - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" + # - a dictionary for the grid information + def create_header_attrs(data_name, valid_time, init_time, lead_time, + accum_time, v_level, units, grid_info, long_name=None): + hdr_attrs = { + + 'valid': valid_time, + 'init': init_time, + 'lead': lead_time, + 'accum': accum_time, + + 'name': data_name, + 'long_name': long_name if long_name is not None and long_name != "" else data_name + '_word', + 'level': v_level, + 'units': units, + + 'grid': grid_info + + } + return hdr_attrs + + #@staticmethod + def read_2d_text_input(input_file): + if os.path.exists(input_file): + met_data = np.loadtxt(input_file) + else: + met_data = None + return met_data + + #@staticmethod + def read_dataplane(netcdf_filename): + # read NetCDF file + ds = nc.Dataset(netcdf_filename, 'r') + met_data = ds['met_data'][:] + met_attrs = {} + + # grid is defined as a dictionary or string + grid = {} + for attr, attr_val in ds.__dict__.items(): + if 'grid.' in attr: + grid_attr = attr.split('.')[1] + grid[grid_attr] = attr_val + else: + met_attrs[attr] = attr_val + + if grid: + met_attrs['grid'] = grid + + met_attrs['name'] = met_attrs['name_str'] + del met_attrs['name_str'] + + met_info = {} + met_info['met_data'] = met_data + met_info['attrs'] = met_attrs + return met_info + + #@staticmethod + def write_dataplane(met_in, netcdf_filename): + met_info = {'met_data': met_in.met_data} + if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: + attrs = met_in.met_data.attrs + else: + attrs = met_in.attrs + met_info['attrs'] = attrs + + # determine fill value + try: + fill = met_in.met_data.get_fill_value() + except: + fill = -9999. 
+ + # write NetCDF file + ds = nc.Dataset(netcdf_filename, 'w') + + # create dimensions and variable + nx, ny = met_in.met_data.shape + ds.createDimension('x', nx) + ds.createDimension('y', ny) + dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill) + dp[:] = met_in.met_data + + # append attributes + for attr, attr_val in met_info['attrs'].items(): + if attr == 'name': + setattr(ds, 'name_str', attr_val) + elif type(attr_val) == dict: + for key in attr_val: + setattr(ds, attr + '.' + key, attr_val[key]) + else: + setattr(ds, attr, attr_val) + + ds.close() From f33b3a502b044637d9b25ee027183cfe16583f97 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Fri, 14 Apr 2023 08:19:21 -0600 Subject: [PATCH 52/81] Some more tweaks to the 2D gridded dataplane section. --- docs/Users_Guide/appendixF.rst | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index fbb44a53a8..d56a93a344 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -16,7 +16,7 @@ Compiling MET for Python Embedding In order to use Python embedding, a local Python installation must be available when compiling the MET software with the following requirements: -1. Python version 3.8.6+ +1. Python version 3.10.4+ 2. C-language Python header files and libraries @@ -40,8 +40,8 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. -Controlling Which Python MET uses with MET_PYTHON_EXE -===================================================== +Controlling Which Python MET uses When Running +============================================== When MET is compiled with Python embedding support, MET uses the Python executable in that Python installation by default when Python embedding is used. However, for users of highly configurable Python environments, the Python instance set at compilation time may not be sufficient. Users may want to use an alternate Python installation if they need additional packages not available in the Python installation used when compiling MET. In MET versions 9.0+, users have the ability to use a different Python executable when running MET than the version used when compiling MET by setting the environment variable **MET_PYTHON_EXE**. @@ -85,7 +85,10 @@ Details for each of these data structures are provided below. Python Embedding for 2D Gridded Dataplanes ------------------------------------------ -Currently, MET supports two different types of Python objects for two-dimensional gridded dataplanes: NumPy N-dimensional arrays (ndarrays) and Xarray DataArrays. The keyword **PYTHON_NUMPY** is used on the command line when using ndarrays, and **PYTHON_XARRAY** when using Xarray DataArrays. Example commands are included below. General requirements for Python embedding with two-dimensional gridded dataplanes are as follows: +Currently, MET supports two different types of Python objects for two-dimensional gridded dataplanes: NumPy N-dimensional arrays (ndarrays) and Xarray DataArrays. The keyword **PYTHON_NUMPY** is used on the command line when using ndarrays, and **PYTHON_XARRAY** when using Xarray DataArrays. Example commands are included at the end of this section. + +Python Script Requirements for 2D Gridded Dataplanes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 1. 
The data must be stored in a variable with the name **met_data**

@@ -272,7 +275,7 @@ Finally, an example **attrs** dictionary is shown below:
 Running Python Embedding for 2D Gridded Dataplanes
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-On the command line for any of the MET tools which will be obtaining its data from a Python script rather than directly from a data file, the user should specify either **PYTHON_NUMPY** or **PYTHON_XARRAY** wherever a (forecast or observation) data file name would normally be given. Then in the **name** entry of the config file dictionaries for the forecast or observation data, the user should list the **full path** to the Python script to be run followed by any command line arguments for that script. Note that for tools like MODE that take two data files, it is entirely possible to use the **PYTHON_NUMPY** for one file and the **PYTHON_XARRAY** for the other.
+On the command line for any of the MET tools which will be obtaining its data from a Python script rather than directly from a data file, the user should specify either **PYTHON_NUMPY** or **PYTHON_XARRAY** wherever a (forecast or observation) data file would normally be given. Then, in the **name** entry of the config file dictionaries for the forecast or observation data (typically used to specify the field name from the input data file), the user should list the **full path** to the Python script to be run followed by any command line arguments for that script. Note that for tools like MODE that take two data files, it is entirely possible to use **PYTHON_NUMPY** for one file and **PYTHON_XARRAY** for the other.
 
 Listed below is an example of running the Plot-Data-Plane tool to call a Python script for data that is included with the MET release tarball. Assuming the MET executables are in your path, this example may be run from the top-level MET source code directory:
 
@@ -282,7 +285,7 @@ Listed below is an example of running the Plot-Data-Plane tool to call a Python
 'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \
 -title "Python enabled plot_data_plane"
 
-The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling Python script that has a two-dimensional gridded dataplane stored in a NumPy N-D array object, set this to the constant string PYTHON_NUMPY. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. When calling a Python script, set **name** to the full path of the Python script to be run along with any command line arguments for that script. Lastly, the **-title** option is used to add a title to the plot. 
+The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling a Python script that has a two-dimensional gridded dataplane stored in a NumPy N-D array object, set this to the constant string **PYTHON_NUMPY**. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. When calling a Python script, set **name** to the full path of the Python script to be run along with any command line arguments for that script. Lastly, the **-title** option is used to add a title to the plot. 
Note that any print statements included in the Python script will be printed to the screen. The above example results in the following log messages: .. code-block:: none @@ -298,9 +301,10 @@ The first argument for the Plot-Data-Plane tool is the gridded data file to be r 'grid': {...} } DEBUG 1: Creating postscript file: fcst.ps -**Special Case for Ensemble-Stat, Series-Analysis, and MTD** +Special Case for Ensemble-Stat, Series-Analysis, and MTD +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a different approach to using Python embedding is required. This approach can be used in any of the MET tools, but it is required when using Python embedding with Ensemble-Stat, Series-Analysis, and MTD. The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set MET_PYTHON_INPUT_ARG to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. +Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a different approach to using Python embedding is required. This approach can be used in any of the MET tools, but it is required when using Python embedding with Ensemble-Stat, Series-Analysis, and MTD. The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set **MET_PYTHON_INPUT_ARG** to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in **PYTHON_NUMPY** or **PYTHON_XARRAY** on the command line for this case. Instead, in the config file dictionary set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate Python embedding in MET. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the full path to the Python script to be run followed by any command line arguments for that script. 
However, in the Python command, replace the name of the input gridded data file with the constant string **MET_PYTHON_INPUT_ARG**. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the approach for this special case: @@ -314,6 +318,10 @@ On the command line for any of the MET tools, specify the path to the input grid Examples of Python Embedding for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +1. TBD: MET commands +2. TBD: Python Scripts +3. TBD: Scripts and stuff from the DTC webpage? + .. _pyembed-point-obs-data: Python Embedding for Point Observations From 97d3ec7ed6f5d6b3d581bddc04b0b6dc8237758d Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Fri, 14 Apr 2023 09:15:48 -0600 Subject: [PATCH 53/81] Adds sample GridStat Python Embedding example. --- docs/Users_Guide/appendixF.rst | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index d56a93a344..2ad7eb8a3e 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -318,6 +318,32 @@ On the command line for any of the MET tools, specify the path to the input grid Examples of Python Embedding for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +**GridStat with Python embedding for forecast and observations** + +.. code-block:: none + :caption: GridStat Command + + grid_stat 'PYTHON_NUMPY' 'PYTHON_NUMPY' GridStat_config -outdir /path/to/output + +.. code-block:: none + :caption: GridStat Config + + fcst = { + field = [ + { + name = "/path/to/fcst/python/script.py python_arg1 python_arg2"; + } + ]; + } + + obs = { + field = [ + { + name = "/path/to/obs/python/script.py python_arg1 python_arg2"; + } + ]; + } + 1. TBD: MET commands 2. TBD: Python Scripts 3. TBD: Scripts and stuff from the DTC webpage? From f71cb4ef3f1b51c1d69c59e38e6616052ac5b765 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Fri, 14 Apr 2023 10:21:25 -0600 Subject: [PATCH 54/81] Updates to the point observation section and adds stub for the MET Python module API. --- docs/Users_Guide/appendixF.rst | 94 ++++++++++++++++++++++++++++++++-- 1 file changed, 90 insertions(+), 4 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 2ad7eb8a3e..6901f24f20 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -344,15 +344,86 @@ Examples of Python Embedding for 2D Gridded Dataplanes ]; } -1. TBD: MET commands -2. TBD: Python Scripts -3. TBD: Scripts and stuff from the DTC webpage? +1. TODO: MET commands +2. TODO: Python Scripts +3. TODO: Scripts and stuff from the DTC webpage? .. _pyembed-point-obs-data: Python Embedding for Point Observations --------------------------------------- +MET also supports point observation data supplied in the :ref:`MET 11-column format`. + +Python Script Requirements for Point Observations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +1. The data must be stored in a variable with the name **point_data** + +2. The **point_data** variable must be a Python list representation of a NumPy N-D Array created from a Pandas DataFrame + +3. 
The **point_data** variable must have data in each of the 11 columns required for the MET tools even if it is NA + +To provide the data that MET expects for point observations, the user is encouraged when designing their Python script to consider how to map their observations into the MET 11-column format linked above. Then, the user can populate their observations into a Pandas DataFrame with the following column names and dtypes: + +.. list-table:: Point Observation DataFrame Columns and Dtypes + :widths: 5 5 10 + :header-rows: 1 + + * - column name + - data type (dtype) + - description + * - typ + - string + - Message Type + * - sid + - string + - Station ID + * - vld + - string + - Valid Time (YYYYMMDD_HHMMSS) + * - lat + - numeric + - Latitude (Degrees North) + * - lon + - numeric + - Longitude (Degrees East) + * - elv + - numeric + - Elevation (MSL) + * - var + - string + - Variable name (or GRIB code) + * - lvl + - numeric + - Level + * - hgt + - numeric + - Height (MSL or AGL) + * - qc + - string + - QC string + * - obs + - numeric + - Observation Value + +Creating the MET 11-column variable from a Pandas DataFrame +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To create the required object for MET, use the **.values** property of the Pandas DataFrame and the **.tolist()** method of the NumPy N-D Array. For example: + +.. code-block:: Python + :caption: Convert Pandas DataFrame to MET variable + + # Pandas DataFrame + my_dataframe = pd.DataFrame() + + # Convert to MET variable + point_data = my_dataframe.values.tolist() + +Running Python Embedding for Point Observations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + The ASCII2NC tool supports the "-format python" option. With this option, point observations may be passed as input. An example of this is shown below: .. code-block:: none @@ -369,12 +440,17 @@ The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ output_image.ps -Both of the above examples use the **read_ascii_point.py** sample script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** sample script can be found in: +Both of the above examples use the **read_ascii_point.py** example script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** example script can be found in: • MET installation directory in *scripts/python/examples*. • `MET GitHub repository `_ in *scripts/python/examples*. +Examples of Python Embedding for Point Observations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +TODO: Add some more examples here? + .. _pyembed-mpr-data: Python Embedding for MPR data @@ -387,3 +463,13 @@ The **read_ascii_mpr.py** sample script can be found in: • MET installation directory in *scripts/python/examples*. • `MET GitHub repository `_ in *MET/scripts/python/examples*. + +MET Python Module +================= + +TODO: Maybe document some of the base classes and functions here? + +I think the most important is: +met.dataplane.set_dataplane_attrs() + +Maybe add others later on. 
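As a first cut at the module documentation stubbed out above, the sketch below shows how a user script might call **met.dataplane.set_dataplane_attrs()** using the signature introduced later in this series (data name, valid/init/lead/accum times, level, units, grid, and an optional long name). The field values and grid string are illustrative only, borrowed from the bundled example scripts, and not a definitive API reference.

.. code-block:: Python

   import numpy as np
   from met.dataplane import dataplane

   # Hypothetical 2D dataplane; MET expects double precision values.
   met_data = np.zeros((129, 185), dtype=np.float64)

   # The grid may be a named grid, a gridded data file, a MET grid string
   # (used here), or a dictionary of grid information.
   grid_info = "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N"

   attrs = dataplane.set_dataplane_attrs("FCST", "20050807_120000",
                                         "20050807_000000", "120000",
                                         "120000", "Surface", "None",
                                         grid_info)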
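Likewise, for the point-observation pathway described in this patch, a minimal sketch (with a made-up station and observation value) might build the required **point_data** variable as follows:

.. code-block:: Python

   import pandas as pd

   # One observation per row, using the MET 11-column names and dtypes
   # from the table above; the station and values are hypothetical.
   df = pd.DataFrame({'typ': ['ADPSFC'],
                      'sid': ['ABC123'],
                      'vld': ['20230101_120000'],
                      'lat': [40.0],
                      'lon': [-105.0],
                      'elv': [1600.0],
                      'var': ['TMP'],
                      'lvl': [2.0],
                      'hgt': [2.0],
                      'qc':  ['NA'],
                      'obs': [273.15]})

   # MET reads the list-of-lists form from the point_data variable.
   point_data = df.values.tolist()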
From 1b92a9db4dd7495c5f3af70ebcb152be9942537c Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Fri, 14 Apr 2023 11:04:48 -0600 Subject: [PATCH 55/81] More updates for point observations section. --- docs/Users_Guide/appendixF.rst | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 6901f24f20..e7797b78a2 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -364,7 +364,7 @@ Python Script Requirements for Point Observations 3. The **point_data** variable must have data in each of the 11 columns required for the MET tools even if it is NA -To provide the data that MET expects for point observations, the user is encouraged when designing their Python script to consider how to map their observations into the MET 11-column format linked above. Then, the user can populate their observations into a Pandas DataFrame with the following column names and dtypes: +To provide the data that MET expects for point observations, the user is encouraged when designing their Python script to consider how to map their observations into the MET 11-column format. Then, the user can populate their observations into a Pandas DataFrame with the following column names and dtypes: .. list-table:: Point Observation DataFrame Columns and Dtypes :widths: 5 5 10 @@ -424,21 +424,22 @@ To create the required object for MET, use the **.values** property of the Panda Running Python Embedding for Point Observations ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The ASCII2NC tool supports the "-format python" option. With this option, point observations may be passed as input. An example of this is shown below: +The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools support Python embedding for point observations. Python embedding for these tools can be invoked directly on the command line by replacing the input MET NetCDF point observation file name with the **full path** to the Python script and any arguments. The Python command must begin with the prefix **PYTHON_NUMPY=**. The full command should be enclosed in quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below for Plot-Point-Obs: .. code-block:: none - ascii2nc -format python \ - "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ - sample_ascii_obs_python.nc + plot_point_obs \ + "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ + output_image.ps -The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools also process point observations. They support Python embedding of point observations directly on the command line by replacing the input MET NetCDF point observation file name with the Python command to be run. The Python command must begin with the prefix 'PYTHON_NUMPY=' and be followed by the path to the User's Python script and any arguments. The full command should be enclosed in single quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below: +The ASCII2NC tool also supports Python embedding, however invoking it varies slightly from other MET tools. For ASCII2NC, Python embedding is used by providing the "-format python" option on the command line. With this option, point observations may be passed as input. An example of this is shown below: .. 
code-block:: none - plot_point_obs \ - "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ - output_image.ps + ascii2nc -format python \ + "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ + sample_ascii_obs_python.nc + Both of the above examples use the **read_ascii_point.py** example script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** example script can be found in: From f5a164d51f489df8c0b31175b1d025c7b5767a5f Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 14 Apr 2023 11:05:18 -0600 Subject: [PATCH 56/81] #2474 renamed create_header_attrs to set_dataplane_attrs --- scripts/python/examples/read_ascii_numpy.py | 6 +++--- scripts/python/examples/read_ascii_numpy_grid.py | 6 +++--- scripts/python/examples/read_ascii_xarray.py | 6 +++--- scripts/python/met/dataplane.py | 5 +++-- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/python/examples/read_ascii_numpy.py b/scripts/python/examples/read_ascii_numpy.py index dad36f7800..a15fe17031 100644 --- a/scripts/python/examples/read_ascii_numpy.py +++ b/scripts/python/examples/read_ascii_numpy.py @@ -30,8 +30,8 @@ met_data = None print("Can't find the input file") -# attrs is a dictionary which contains header data for the dataplane. -# attrs should have 9 members with string type data: +# attrs is a dictionary which contains attributes describing the dataplane. +# attrs should have 9 items, each of data type string: # 'name': data name # 'long_name': descriptive name # 'valid': valid time (format = 'yyyymmdd_hhmmss') @@ -78,7 +78,7 @@ } long_name = data_name + "_word" -attrs = dataplane.create_header_attrs(data_name, valid_time, init_time, +attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time, lead_time, accum_time, v_level, units, grid_lambert_conformal, long_name) diff --git a/scripts/python/examples/read_ascii_numpy_grid.py b/scripts/python/examples/read_ascii_numpy_grid.py index 6e039b67b1..79e6829052 100644 --- a/scripts/python/examples/read_ascii_numpy_grid.py +++ b/scripts/python/examples/read_ascii_numpy_grid.py @@ -30,8 +30,8 @@ except NameError: print("Can't find the input file") -# attrs is a dictionary which contains header data for the dataplane. -# attrs should have 9 members with string type data: +# attrs is a dictionary which contains attributes describing the dataplane. +# attrs should have 9 items, each of data type string: # 'name': data name # 'long_name': descriptive name # 'valid': valid time (format = 'yyyymmdd_hhmmss') @@ -57,7 +57,7 @@ grid_info = os.path.expandvars(os.getenv('PYTHON_GRID')) long_name = data_name + "_word" -attrs = dataplane.create_header_attrs(data_name, valid_time, init_time, +attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time, lead_time, accum_time, v_level, units, grid_info, long_name) diff --git a/scripts/python/examples/read_ascii_xarray.py b/scripts/python/examples/read_ascii_xarray.py index dbc4e4b33f..8998235ea1 100644 --- a/scripts/python/examples/read_ascii_xarray.py +++ b/scripts/python/examples/read_ascii_xarray.py @@ -37,8 +37,8 @@ ## create the metadata dictionary ## -# attrs is a dictionary which contains header data for the dataplane. -# attrs should have 9 members with string type data: +# attrs is a dictionary which contains attributes describing the dataplane. 
+# attrs should have 9 items, each of data type string: # 'name': data name # 'long_name': descriptive name # 'valid': valid time (format = 'yyyymmdd_hhmmss') @@ -85,7 +85,7 @@ } long_name = data_name + "_word" -attrs = dataplane.create_header_attrs(data_name, valid_time, init_time, +attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time, lead_time, accum_time, v_level, units, grid_lambert_conformal) diff --git a/scripts/python/met/dataplane.py b/scripts/python/met/dataplane.py index e454918d65..a9aed943b6 100644 --- a/scripts/python/met/dataplane.py +++ b/scripts/python/met/dataplane.py @@ -11,6 +11,7 @@ class dataplane(): ## #@staticmethod + # Python dictionary items: # 'name': data name # 'long_name': descriptive name # 'valid': valid time (format = 'yyyymmdd_hhmmss') @@ -24,7 +25,7 @@ class dataplane(): # - a gridded data file name # - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N" # - a dictionary for the grid information - def create_header_attrs(data_name, valid_time, init_time, lead_time, + def set_dataplane_attrs(data_name, valid_time, init_time, lead_time, accum_time, v_level, units, grid_info, long_name=None): hdr_attrs = { @@ -34,7 +35,7 @@ def create_header_attrs(data_name, valid_time, init_time, lead_time, 'accum': accum_time, 'name': data_name, - 'long_name': long_name if long_name is not None and long_name != "" else data_name + '_word', + 'long_name': long_name if long_name is not None and long_name != "" else data_name + '_long', 'level': v_level, 'units': units, From d0516f8c2e89ad88c3f6bf557641adc3f2c50fc6 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Fri, 14 Apr 2023 11:06:30 -0600 Subject: [PATCH 57/81] Removes subsection to consolidate info for point observations. --- docs/Users_Guide/appendixF.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index e7797b78a2..c5fb74974b 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -407,10 +407,7 @@ To provide the data that MET expects for point observations, the user is encoura - numeric - Observation Value -Creating the MET 11-column variable from a Pandas DataFrame -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To create the required object for MET, use the **.values** property of the Pandas DataFrame and the **.tolist()** method of the NumPy N-D Array. For example: +To create the variable for MET, use the **.values** property of the Pandas DataFrame and the **.tolist()** method of the NumPy N-D Array. For example: .. 
code-block:: Python
   :caption: Convert Pandas DataFrame to MET variable

From 55dc1a96af101a79bf550757935cb111d997b233 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 14 Apr 2023 11:07:00 -0600
Subject: [PATCH 58/81] #2474 Corrected API name, get_sample_point_obs

---
 scripts/python/examples/read_met_point_obs.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/python/examples/read_met_point_obs.py b/scripts/python/examples/read_met_point_obs.py
index b84248c34c..e16ccf2d86 100644
--- a/scripts/python/examples/read_met_point_obs.py
+++ b/scripts/python/examples/read_met_point_obs.py
@@ -29,11 +29,11 @@
 start_time = datetime.now()

 point_obs_data = None
-input_name = sys.argv[1]
+input_name = sys.argv[1] if len(sys.argv) > 1 else None
 prompt = met_point_tools.get_prompt()
 if len(sys.argv) == 1 or ARG_PRINT_DATA == input_name:
    # This is an example of creating a sample data
-   point_obs_data = met_point_tools.get_sample_met_point_obs()
+   point_obs_data = met_point_tools.get_sample_point_obs()
    point_obs_data.read_data([])
 elif met_point_tools.is_python_prefix(input_name):
    # This is an example of calling a python script for ascii2nc

From 32d0f5bca81989bb44d3eeb218c4233d8c5c66c8 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 14 Apr 2023 11:08:07 -0600
Subject: [PATCH 59/81] #2474 Formatting (indent)

---
 scripts/python/pyembed/read_tmp_ascii.py      | 26 +++++++-------
 scripts/python/pyembed/read_tmp_point_nc.py   | 16 ++++-----
 scripts/python/pyembed/write_tmp_dataplane.py | 12 +++----
 scripts/python/pyembed/write_tmp_mpr.py       | 13 ++++---
 scripts/python/pyembed/write_tmp_point.py     | 10 +++---
 scripts/python/pyembed/write_tmp_point_nc.py  | 35 ++++++++++---------
 6 files changed, 55 insertions(+), 57 deletions(-)

diff --git a/scripts/python/pyembed/read_tmp_ascii.py b/scripts/python/pyembed/read_tmp_ascii.py
index b508f6cbef..1e9573171c 100644
--- a/scripts/python/pyembed/read_tmp_ascii.py
+++ b/scripts/python/pyembed/read_tmp_ascii.py
@@ -21,21 +21,21 @@
 import argparse

 try:
-  from python_embedding import pyembed_tools
+    from python_embedding import pyembed_tools
 except:
-  from pyembed.python_embedding import pyembed_tools
+    from pyembed.python_embedding import pyembed_tools

 def read_tmp_ascii(filename):
-  global ascii_data # defined at python_handler.cc (tmp_list_name)
-  ascii_data = pyembed_tools.read_tmp_ascii(filename)
-  return ascii_data
+    global ascii_data # defined at python_handler.cc (tmp_list_name)
+    ascii_data = pyembed_tools.read_tmp_ascii(filename)
+    return ascii_data

 if __name__ == '__main__':
-  """
-  Parse command line arguments
-  """
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--filename', type=str)
-  args = parser.parse_args()
-
-  data = read_tmp_ascii(args.filename)
+    """
+    Parse command line arguments
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--filename', type=str)
+    args = parser.parse_args()
+
+    data = read_tmp_ascii(args.filename)

diff --git a/scripts/python/pyembed/read_tmp_point_nc.py b/scripts/python/pyembed/read_tmp_point_nc.py
index d9df6dfdec..622405c520 100644
--- a/scripts/python/pyembed/read_tmp_point_nc.py
+++ b/scripts/python/pyembed/read_tmp_point_nc.py
@@ -10,21 +10,19 @@
 from met.point import met_point_tools

 try:
-  from python_embedding import pyembed_tools
+    from python_embedding import pyembed_tools
 except:
-  from pyembed.python_embedding import pyembed_tools
-
-#pyembed_tools.add_python_path(__file__)
+    from pyembed.python_embedding import pyembed_tools

 input_filename = sys.argv[1]

 # read
NetCDF file print('{p} reading {f}'.format(p=met_point_tools.get_prompt(), f=input_filename)) try: - point_obs_data = met_point_tools.get_nc_point_obs() - point_obs_data.read_data(input_filename) + point_obs_data = met_point_tools.get_nc_point_obs() + point_obs_data.read_data(input_filename) - met_point_data = point_obs_data.get_point_data() - met_point_data['met_point_data'] = point_obs_data + met_point_data = point_obs_data.get_point_data() + met_point_data['met_point_data'] = point_obs_data except: - point_data = pyembed_tools.read_tmp_ascii(input_filename) + point_data = pyembed_tools.read_tmp_ascii(input_filename) diff --git a/scripts/python/pyembed/write_tmp_dataplane.py b/scripts/python/pyembed/write_tmp_dataplane.py index 150f69979b..991ca0c0fd 100644 --- a/scripts/python/pyembed/write_tmp_dataplane.py +++ b/scripts/python/pyembed/write_tmp_dataplane.py @@ -11,17 +11,17 @@ import sys try: - from python_embedding import pyembed_tools + from python_embedding import pyembed_tools + pyembed_tools.add_python_path(__file__) except: - from pyembed.python_embedding import pyembed_tools + from pyembed.python_embedding import pyembed_tools -pyembed_tools.add_python_path(__file__) from met.dataplane import dataplane #def write_dataplane(met_in, netcdf_filename): # dataplane.write_dataplane(met_in, netcdf_filename) if __name__ == '__main__': - netcdf_filename = sys.argv[1] - met_in = pyembed_tools.call_python(sys.argv) - dataplane.write_dataplane(met_in, netcdf_filename) + netcdf_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + dataplane.write_dataplane(met_in, netcdf_filename) diff --git a/scripts/python/pyembed/write_tmp_mpr.py b/scripts/python/pyembed/write_tmp_mpr.py index da4f65fb03..0e6141b76c 100644 --- a/scripts/python/pyembed/write_tmp_mpr.py +++ b/scripts/python/pyembed/write_tmp_mpr.py @@ -9,15 +9,14 @@ ######################################################################## import sys - try: - from python_embedding import pyembed_tools + from python_embedding import pyembed_tools except: - from pyembed.python_embedding import pyembed_tools + from pyembed.python_embedding import pyembed_tools if __name__ == '__main__': - argv_org = sys.argv[:] - tmp_filename = sys.argv[1] - met_in = pyembed_tools.call_python(sys.argv) + argv_org = sys.argv[:] + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) - pyembed_tools.write_tmp_ascii(tmp_filename, met_in.mpr_data) + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.mpr_data) diff --git a/scripts/python/pyembed/write_tmp_point.py b/scripts/python/pyembed/write_tmp_point.py index edcbdbd877..95f2992094 100644 --- a/scripts/python/pyembed/write_tmp_point.py +++ b/scripts/python/pyembed/write_tmp_point.py @@ -11,11 +11,11 @@ import sys try: - from python_embedding import pyembed_tools + from python_embedding import pyembed_tools except: - from pyembed.python_embedding import pyembed_tools + from pyembed.python_embedding import pyembed_tools if __name__ == '__main__': - tmp_filename = sys.argv[1] - met_in = pyembed_tools.call_python(sys.argv) - pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data) + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data) diff --git a/scripts/python/pyembed/write_tmp_point_nc.py b/scripts/python/pyembed/write_tmp_point_nc.py index c12683453e..6d6c69f693 100644 --- a/scripts/python/pyembed/write_tmp_point_nc.py +++ b/scripts/python/pyembed/write_tmp_point_nc.py @@ -12,26 +12,27 
@@
 import sys

 try:
-  from python_embedding import pyembed_tools
+    from python_embedding import pyembed_tools
+    pyembed_tools.add_python_path(__file__)
 except:
-  from pyembed.python_embedding import pyembed_tools
+    from pyembed.python_embedding import pyembed_tools
+
-pyembed_tools.add_python_path(__file__)
 from met.point import met_point_tools

 if __name__ == '__main__':
-   argv_org = sys.argv[:]
-   tmp_filename = sys.argv[1]
-   met_in = pyembed_tools.call_python(sys.argv)
+    argv_org = sys.argv[:]
+    tmp_filename = sys.argv[1]
+    met_in = pyembed_tools.call_python(sys.argv)

-   if hasattr(met_in, 'point_data'):
-      pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data)
-   elif hasattr(met_in, 'point_obs_data'):
-      met_in.point_obs_data.save_ncfile(tmp_filename)
-   else:
-      if hasattr(met_in.met_point_data, 'point_obs_data'):
-         met_in.met_point_data['point_obs_data'].save_ncfile(tmp_filename)
-      else:
-         tmp_point_obs = met_point_tools.get_nc_point_obs()
-         tmp_point_obs.put_data(met_in.met_point_data)
-         tmp_point_obs.save_ncfile(tmp_filename)
+    if hasattr(met_in, 'point_data'):
+        pyembed_tools.write_tmp_ascii(tmp_filename, met_in.point_data)
+    elif hasattr(met_in, 'point_obs_data'):
+        met_in.point_obs_data.save_ncfile(tmp_filename)
+    else:
+        if hasattr(met_in.met_point_data, 'point_obs_data'):
+            met_in.met_point_data['point_obs_data'].save_ncfile(tmp_filename)
+        else:
+            tmp_point_obs = met_point_tools.get_nc_point_obs()
+            tmp_point_obs.put_data(met_in.met_point_data)
+            tmp_point_obs.save_ncfile(tmp_filename)
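The pyembed wrapper scripts reindented by the patch above all share the same import fallback: a bare import is tried first, for the case where the pyembed directory itself is on sys.path, and the package-qualified import is used otherwise. A minimal sketch of that pattern, with a hypothetical module name used purely for illustration:

.. code-block:: Python

   try:
       # Works when the directory containing my_helpers.py is on sys.path
       from my_helpers import do_work
   except ImportError:
       # Works when only the parent package directory is on sys.path
       from my_package.my_helpers import do_work

Note that the MET wrappers use a bare except rather than except ImportError, so the fallback import is attempted no matter why the first import fails.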
From 3f09e21544cd71f060784a20526949d2f4f07732 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 14 Apr 2023 11:09:49 -0600
Subject: [PATCH 60/81] #2474 Updated comments

---
 scripts/python/examples/read_ascii_mpr.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/scripts/python/examples/read_ascii_mpr.py b/scripts/python/examples/read_ascii_mpr.py
index cd964434fc..d166893c98 100644
--- a/scripts/python/examples/read_ascii_mpr.py
+++ b/scripts/python/examples/read_ascii_mpr.py
@@ -21,8 +21,10 @@
 try:
     print("Input File:\t" + repr(input_file))

-    # Read MPR lines by using pandas package, skipping the header row and
-    # first column. Input should be a 36 column text data.
+    # Read MPR lines by using the Pandas Python package,
+    # skipping the header row and first column.
+    # Input should be 36-column text data matching the MPR line-type
+    # output from MET tools.
     mpr_data = mpr_data.read_mpr(input_file, col_start=1, col_last=36, skiprows=1)
     print("Data Length:\t" + repr(len(mpr_data)))
     print("Data Type:\t" + repr(type(mpr_data)))

From d4c8844a8a709c372fa954ba131adc14071af1ac Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 14 Apr 2023 11:11:00 -0600
Subject: [PATCH 61/81] #2474 Removed the conversion option

---
 scripts/python/examples/read_ascii_point.py | 15 ++-------------
 1 file changed, 2 insertions(+), 13 deletions(-)

diff --git a/scripts/python/examples/read_ascii_point.py b/scripts/python/examples/read_ascii_point.py
index 05f5edf340..88e9e31ce6 100644
--- a/scripts/python/examples/read_ascii_point.py
+++ b/scripts/python/examples/read_ascii_point.py
@@ -1,7 +1,7 @@
 import os
 import sys

-from met.point import met_point_tools as tools
+from met.point import met_point_tools

 ########################################################################

@@ -17,14 +17,7 @@
    print("ERROR: read_ascii_point.py -> Missing an input file.")
    sys.exit(1)

-do_convert = False
 last_index = 2
-if arg_cnt > last_index:
-   opt_convert = sys.argv[2]
-   if opt_convert.lower() == "do_convert" or opt_convert.lower() == "convert":
-      do_convert = True
-      last_index += 1
-
 if last_index < arg_cnt:
    print(" INFO: read_ascii_point.py -> Too many argument, ignored {o}.".format(
          o=' '.join(sys.argv[last_index:])))
@@ -48,13 +41,9 @@
    # (11) numeric: Observation_Value

    # Read 11 column text input data by using pandas package
-   point_data = tools.read_text_point_obs(input_file)
+   point_data = met_point_tools.read_text_point_obs(input_file)
    print("    point_data: Data Length:\t" + repr(len(point_data)))
    print("    point_data: Data Type:\t" + repr(type(point_data)))
-   if do_convert:
-      # Convert 11 column list to MET's internal python instance
-      met_point_data = tools.convert_point_data(point_data)
-      print("    met_point_data: Data Type:\t" + repr(type(met_point_data)))
 except FileNotFoundError:
    print(f"The input file {input_file} does not exist")
    sys.exit(1)

From 9b1134cda581756d0b1f4af330ff3cf954e71d2a Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Fri, 14 Apr 2023 11:38:53 -0600
Subject: [PATCH 62/81] Cleans up code-block formatting and adds captions to
 relevant code blocks, since a captioned code block gets a copyable link that
 is easy to share.

---
 docs/Users_Guide/appendixF.rst | 136 +++++++++++++++++----------------
 1 file changed, 70 insertions(+), 66 deletions(-)

diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst
index c5fb74974b..cfc9057615 100644
--- a/docs/Users_Guide/appendixF.rst
+++ b/docs/Users_Guide/appendixF.rst
@@ -48,8 +48,9 @@ When MET is compiled with Python embedding support, MET uses the Python executab
 If a user's Python script requires packages that are not available in the Python installation used when compiling the MET software, they will encounter a runtime error when using MET. In this instance, the user will need to change the Python MET is using to a different installation with the required packages for their script. It is the responsibility of the user to manage this Python installation, and one popular approach is to use a custom Anaconda (Conda) Python environment. Once the Python installation meeting the user's requirements is available, the user can force MET to use it by setting the **MET_PYTHON_EXE** environment variable to the full path of the Python executable in that installation. For example:

 ..
code-block:: none + :caption: Setting MET_PYTHON_EXE - export MET_PYTHON_EXE=/usr/local/python3/bin/python3 + export MET_PYTHON_EXE=/usr/local/python3/bin/python3 Setting this environment variable triggers slightly different processing logic in MET than when MET uses the Python installation that was used when compiling MET. When using the Python installation that was used when compiling MET, Python is called directly and data are passed in memory from Python to the MET tools. When the user sets **MET_PYTHON_EXE**, MET does the following: @@ -150,20 +151,23 @@ The grid entry in the **attrs** dictionary must contain the grid size and projec • Using a named grid supported by MET: .. code-block:: none + :caption: Named Grid - 'grid': 'G212' + 'grid': 'G212' • As a grid specification string, as described in :ref:`appendixB`: .. code-block:: none + :caption: Grid Specification String - 'grid': 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N' + 'grid': 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N' • As the path to an existing gridded data file: .. code-block:: none + :caption: Grid From File - 'grid': '/path/to/sample_data.grib' + 'grid': '/path/to/sample_data.grib' When specified as a dictionary, the contents of the **grid** entry vary based upon the grid **type**. The required elements for supported grid types are: @@ -238,39 +242,39 @@ Additional information about supported grids can be found in :ref:`appendixB`. Finally, an example **attrs** dictionary is shown below: .. code-block:: none - - attrs = { - - 'valid': '20050807_120000', - 'init': '20050807_000000', - 'lead': '120000', - 'accum': '120000', - - 'name': 'Foo', - 'long_name': 'FooBar', - 'level': 'Surface', - 'units': 'None', + :caption: Sample Attrs Dictionary + + attrs = { + + 'valid': '20050807_120000', + 'init': '20050807_000000', + 'lead': '120000', + 'accum': '120000', + + 'name': 'Foo', + 'long_name': 'FooBar', + 'level': 'Surface', + 'units': 'None', - # Define 'grid' as a string or a dictionary + # Define 'grid' as a string or a dictionary - 'grid': { - 'type': 'Lambert Conformal', - 'hemisphere': 'N', - 'name': 'FooGrid', - 'scale_lat_1': 25.0, - 'scale_lat_2': 25.0, - 'lat_pin': 12.19, - 'lon_pin': -135.459, - 'x_pin': 0.0, - 'y_pin': 0.0, - 'lon_orient': -95.0, - 'd_km': 40.635, - 'r_km': 6371.2, - 'nx': 185, - 'ny': 129, - } - - } + 'grid': { + 'type': 'Lambert Conformal', + 'hemisphere': 'N', + 'name': 'FooGrid', + 'scale_lat_1': 25.0, + 'scale_lat_2': 25.0, + 'lat_pin': 12.19, + 'lon_pin': -135.459, + 'x_pin': 0.0, + 'y_pin': 0.0, + 'lon_orient': -95.0, + 'd_km': 40.635, + 'r_km': 6371.2, + 'nx': 185, + 'ny': 129, + } + } Running Python Embedding for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -280,26 +284,27 @@ On the command line for any of the MET tools which will be obtaining its data fr Listed below is an example of running the Plot-Data-Plane tool to call a Python script for data that is included with the MET release tarball. Assuming the MET executables are in your path, this example may be run from the top-level MET source code directory: .. 
code-block:: none + :caption: plot_data_plane Python Embedding - plot_data_plane PYTHON_NUMPY fcst.ps \ - 'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \ - -title "Python enabled plot_data_plane" + plot_data_plane PYTHON_NUMPY fcst.ps \ + 'name="scripts/python/examples/read_ascii_numpy.py data/python/fcst.txt FCST";' \ + -title "Python enabled plot_data_plane" The first argument for the Plot-Data-Plane tool is the gridded data file to be read. When calling Python script that has a two-dimensional gridded dataplane stored in a NumPy N-D array object, set this to the constant string **PYTHON_NUMPY**. The second argument is the name of the output PostScript file to be written. The third argument is a string describing the data to be plotted. When calling a Python script, set **name** to the full path of the Python script to be run along with any command line arguments for that script. Lastly, the **-title** option is used to add a title to the plot. Note that any print statements included in the Python script will be printed to the screen. The above example results in the following log messages: .. code-block:: none - DEBUG 1: Opening data file: PYTHON_NUMPY - Input File: 'data/python/fcst.txt' - Data Name : 'FCST' - Data Shape: (129, 185) - Data Type: dtype('float64') - Attributes: {'name': 'FCST', 'long_name': 'FCST_word', - 'level': 'Surface', 'units': 'None', - 'init': '20050807_000000', 'valid': '20050807_120000', - 'lead': '120000', 'accum': '120000' - 'grid': {...} } - DEBUG 1: Creating postscript file: fcst.ps + DEBUG 1: Opening data file: PYTHON_NUMPY + Input File: 'data/python/fcst.txt' + Data Name : 'FCST' + Data Shape: (129, 185) + Data Type: dtype('float64') + Attributes: {'name': 'FCST', 'long_name': 'FCST_word', + 'level': 'Surface', 'units': 'None', + 'init': '20050807_000000', 'valid': '20050807_120000', + 'lead': '120000', 'accum': '120000' + 'grid': {...} } + DEBUG 1: Creating postscript file: fcst.ps Special Case for Ensemble-Stat, Series-Analysis, and MTD ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -309,11 +314,12 @@ Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a diffe On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in **PYTHON_NUMPY** or **PYTHON_XARRAY** on the command line for this case. Instead, in the config file dictionary set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate Python embedding in MET. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the full path to the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string **MET_PYTHON_INPUT_ARG**. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the approach for this special case: .. 
code-block:: none
-   
-   plot_data_plane data/python/fcst.txt fcst.ps \
-     'name="scripts/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; \
-     file_type=PYTHON_NUMPY;' \
-     -title "Python enabled plot_data_plane"
+   :caption: plot_data_plane Python Embedding using MET_PYTHON_INPUT_ARG
+   
+   plot_data_plane data/python/fcst.txt fcst.ps \
+     'name="scripts/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; \
+     file_type=PYTHON_NUMPY;' \
+     -title "Python enabled plot_data_plane"

 Examples of Python Embedding for 2D Gridded Dataplanes
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -321,12 +327,12 @@ Examples of Python Embedding for 2D Gridded Dataplanes
 **GridStat with Python embedding for forecast and observations**

 .. code-block:: none
-  :caption: GridStat Command
+  :caption: GridStat Command with Dual Python Embedding

   grid_stat 'PYTHON_NUMPY' 'PYTHON_NUMPY' GridStat_config -outdir /path/to/output

 .. code-block:: none
-  :caption: GridStat Config
+  :caption: GridStat Config with Dual Python Embedding

   fcst = {
      field = [
        {
          name = "/path/to/fcst/python/script.py python_arg1 python_arg2";
        }
     ];
   }

   obs = {
      field = [
        {
          name = "/path/to/obs/python/script.py python_arg1 python_arg2";
        }
     ];
   }

-1. TODO: MET commands
-2. TODO: Python Scripts
-3. TODO: Scripts and stuff from the DTC webpage?
-
 .. _pyembed-point-obs-data:

 Python Embedding for Point Observations
@@ -424,18 +426,20 @@ Running Python Embedding for Point Observations
 The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools support Python embedding for point observations. Python embedding for these tools can be invoked directly on the command line by replacing the input MET NetCDF point observation file name with the **full path** to the Python script and any arguments. The Python command must begin with the prefix **PYTHON_NUMPY=**. The full command should be enclosed in quotes to prevent embedded whitespace from causing parsing errors. An example of this is shown below for Plot-Point-Obs:

 .. code-block:: none
+  :caption: plot_point_obs with Python Embedding

-  plot_point_obs \
-  "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \
-  output_image.ps
+   plot_point_obs \
+   "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \
+   output_image.ps

 The ASCII2NC tool also supports Python embedding, however invoking it varies slightly from other MET tools. For ASCII2NC, Python embedding is used by providing the "-format python" option on the command line. With this option, point observations may be passed as input. An example of this is shown below:

 .. code-block:: none
+  :caption: ascii2nc with Python Embedding

-  ascii2nc -format python \
-  "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \
-  sample_ascii_obs_python.nc
+   ascii2nc -format python \
+   "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \
+   sample_ascii_obs_python.nc

 Both of the above examples use the **read_ascii_point.py** example script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data.

 The **read_ascii_point.py** example script can be found in:

From 7d704984821a71245c081ef56d4e7c55dd44a883 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Fri, 14 Apr 2023 11:54:35 -0600
Subject: [PATCH 63/81] Adds example of dual Python embedding for PointStat.
--- docs/Users_Guide/appendixF.rst | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index cfc9057615..5d3be5f691 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -451,7 +451,31 @@ Both of the above examples use the **read_ascii_point.py** example script which Examples of Python Embedding for Point Observations ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -TODO: Add some more examples here? +**PointStat with Python embedding for forecast and observations** + +.. code-block:: none + :caption: PointStat Command with Dual Python Embedding + + point_stat 'PYTHON_NUMPY' 'PYTHON_NUMPY' PointStat_config -outdir /path/to/output + +.. code-block:: none + :caption: PointStat Config with Dual Python Embedding + + fcst = { + field = [ + { + name = "/path/to/fcst/python/script.py python_arg1 python_arg2"; + } + ]; + } + + obs = { + field = [ + { + name = "/path/to/obs/python/script.py python_arg1 python_arg2"; + } + ]; + } .. _pyembed-mpr-data: From abad3221130dcc7601ef8aa747eaa66eff9754d9 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Fri, 14 Apr 2023 12:31:26 -0600 Subject: [PATCH 64/81] Updates to the MPR section. --- docs/Users_Guide/appendixF.rst | 55 ++++++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 5d3be5f691..4a3b56ed53 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -441,7 +441,6 @@ The ASCII2NC tool also supports Python embedding, however invoking it varies sli "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ sample_ascii_obs_python.nc - Both of the above examples use the **read_ascii_point.py** example script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data. The **read_ascii_point.py** example script can be found in: • MET installation directory in *scripts/python/examples*. @@ -479,10 +478,55 @@ Examples of Python Embedding for Point Observations .. _pyembed-mpr-data: -Python Embedding for MPR data +Python Embedding for MPR Data ----------------------------- -The Stat-Analysis tool supports the "-lookin python" option. With this option, matched pair (MPR) data may be passed as input. An example of this is provided in :numref:`StA-pyembed`. That example uses the **read_ascii_mpr.py** sample script which is included with the MET code. It reads MPR data and stores it in a Pandas dataframe to be read by the Stat-Analysis tool with Python. +The MET Stat-Analysis tool also supports Python embedding. By using the command line option **-lookin python**, Stat-Analysis can read matched pair (MPR) data formatted in the MET MPR line-type format via Python. + +.. note:: + + This functionality assumes you are passing only the MPR line type information, and not other statistical line types. Sometimes users configure MET tools to write the MPR line type to the STAT file (along with all other line types). The example below will not work for those files, but rather only files from MET tools containing just the MPR line type information, or optionally, data in another format that the user adapts to the MPR line type format. + +Python Script Requirements for MPR Data +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +1. 
The data must be stored in a variable with the name **mpr_data**
+
+2. The **mpr_data** variable must be a Python list representation of a NumPy N-D Array created from a Pandas DataFrame
+
+3. The **mpr_data** variable must have data in **exactly** 36 columns, corresponding to the summation of the :ref:`_table_PS_header_info_point-stat_out`.
+
+If a user does not have an existing MPR line type file created by the MET tools, they will need to map their data into the 36 columns expected by Stat-Analysis for the MPR line type data. If a user already has MPR line type files, the most direct way for a user to read MPR line type data is to model their Python script after the sample **read_ascii_mpr.py** script. Sample code is included here for convenience:
+
+.. code-block:: Python
+   :caption: Reading MPR line types with Pandas
+
+   # Open the MPR line type file
+   mpr_dataframe = pd.read_csv(input_mpr_file,\
+                               header=None,\
+                               delim_whitespace=True,\
+                               keep_default_na=False,\
+                               skiprows=1,\
+                               usecols=range(1,36),\
+                               dtype=str)
+
+   # Convert to the variable MET expects
+   mpr_data = mpr_dataframe.values.tolist()
+
+Running Python Embedding for MPR Data
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Stat-Analysis can be run using the **-lookin python** command line option:
+
+.. code-block:: none
+   :caption: Stat-Analysis with Python Embedding of MPR Data
+
+   stat_analysis \
+   -lookin python MET_BASE/python/examples/read_ascii_mpr.py point_stat_mpr.txt \
+   -job aggregate_stat -line_type MPR -out_line_type CNT \
+   -by FCST_VAR,FCST_LEV
+
+In this example, rather than passing the MPR output lines from Point-Stat directly into Stat-Analysis (which is the typical approach), the **read_ascii_mpr.py** Python embedding script reads that file and passes the data to Stat-Analysis. The aggregate_stat job is defined on the command line and CNT statistics are derived from the MPR input data. Separate CNT statistics are computed for each unique combination of FCST_VAR and FCST_LEV present in the input.

 The **read_ascii_mpr.py** sample script can be found in:

@@ -490,6 +534,11 @@ The **read_ascii_mpr.py** sample script can be found in:

 • `MET GitHub repository `_ in *MET/scripts/python/examples*.

+Examples of Python Embedding for MPR Data
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+TODO: Is there another example that might be useful here? Probably not I suppose.
+
 MET Python Module
 =================
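For users who must map their own data into the MPR layout described in the patch above, the only hard requirements it states are the 36-column shape and the final conversion to a plain Python list named **mpr_data**. A minimal sketch under those assumptions; the single placeholder row below is fabricated for illustration and is not a meaningful MPR record:

.. code-block:: Python

   import pandas as pd

   # Hypothetical user data already arranged into the 36 MPR columns
   df = pd.DataFrame([["NA"] * 36])

   # Stat-Analysis expects exactly 36 columns, so verify before converting
   assert df.shape[1] == 36, "MPR line type requires exactly 36 columns"

   # Convert to the plain list of lists that Stat-Analysis reads
   mpr_data = df.astype(str).values.tolist()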
From 38a74783b861f9e6c4dc84738c43079f718b707 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Fri, 14 Apr 2023 12:41:34 -0600
Subject: [PATCH 65/81] Removes Python embedding example and moves to Appendix
 F.

---
 docs/Users_Guide/stat-analysis.rst | 20 ++------------------
 1 file changed, 2 insertions(+), 18 deletions(-)

diff --git a/docs/Users_Guide/stat-analysis.rst b/docs/Users_Guide/stat-analysis.rst
index 602920c22d..8488dd39df 100644
--- a/docs/Users_Guide/stat-analysis.rst
+++ b/docs/Users_Guide/stat-analysis.rst
@@ -9,7 +9,7 @@ Introduction

 The Stat-Analysis tool ties together results from the Point-Stat, Grid-Stat, Ensemble-Stat, Wavelet-Stat, and TC-Gen tools by providing summary statistical information and a way to filter their STAT output files. It processes the STAT output created by the other MET tools in a variety of ways which are described in this section.

-MET version 9.0 adds support for the passing matched pair data (MPR) into Stat-Analysis using a Python script with the "-lookin python ..." option. An example of running Stat-Analysis with Python embedding is shown in :numref:`stat_analysis-usage`.
+MET version 9.0 adds support for passing matched pair (MPR) data into Stat-Analysis using a Python script with the "-lookin python ..." option. An example of running Stat-Analysis with Python embedding can be found in :numref:`Appendix F, Section %s <pyembed-mpr-data>`.

 Scientific and statistical aspects
 ==================================
@@ -287,7 +287,7 @@ In the usage statement for the Stat-Analysis tool, some additional terminology i
 Required arguments for stat_analysis
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-1. The **-lookin path** specifies the name of a directory to be searched recursively for STAT files (ending in ".stat") or any explicit file name with any suffix (such as "_ctc.txt") to be read. This option may be used multiple times to specify multiple directories and/or files to be read. If "-lookin python" is used, it must be followed by a Python embedding script and any command line arguments it takes. Python embedding can be used to pass matched pair (MPR) lines as input to Stat-Analysis.
+1. The **-lookin path** specifies the name of a directory to be searched recursively for STAT files (ending in ".stat") or any explicit file name with any suffix (such as "_ctc.txt") to be read. This option may be used multiple times to specify multiple directories and/or files to be read. If "-lookin python" is used, it must be followed by a Python embedding script and any command line arguments it takes. Python embedding can be used to pass **only** matched pair (MPR) lines as input to Stat-Analysis.

 2. Either a configuration file must be specified with the **-config** option, or a **JOB COMMAND LINE** must be denoted. The **JOB COMMAND LINE** is described in :numref:`stat_analysis-configuration-file`

@@ -313,22 +313,6 @@ An example of the stat_analysis calling sequence is shown below.

 In this example, the Stat-Analysis tool will search for valid STAT lines located in the *../out/point_stat* directory that meet the options specified in the configuration file, *config/STATAnalysisConfig*.

-.. _StA-pyembed:
-
-Python Embedding for Matched Pairs
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The example below uses Python embedding.
-
-.. code-block:: none
-
-   stat_analysis \
-   -lookin python MET_BASE/python/examples/read_ascii_mpr.py point_stat_mpr.txt \
-   -job aggregate_stat -line_type MPR -out_line_type CNT \
-   -by FCST_VAR,FCST_LEV
-
-In this example, rather than passing the MPR output lines from Point-Stat directly into Stat-Analysis (which is the typical approach), the read_ascii_mpr.py Python embedding script reads that file and passes the data to Stat-Analysis. The aggregate_stat job is defined on the command line and CNT statistics are derived from the MPR input data. Separate CNT statistics are computed for each unique combination of FCST_VAR and FCST_LEV present in the input. Please refer to :numref:`Appendix F, Section %s <pyembed-mpr-data>` for more details about Python embedding in MET.
-
 .. _stat_analysis-configuration-file:

 stat_analysis configuration file
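Since "-lookin python" is an ordinary command line option, the aggregate_stat job relocated to Appendix F by the patch above can also be launched from a scripted workflow. A sketch using only the options shown in the documented example, with placeholder paths and stat_analysis assumed to be on the PATH:

.. code-block:: Python

   import subprocess

   # Same job as the documented example: derive CNT statistics from MPR
   # lines, grouped by each unique FCST_VAR and FCST_LEV combination
   cmd = ["stat_analysis",
          "-lookin", "python",
          "MET_BASE/python/examples/read_ascii_mpr.py", "point_stat_mpr.txt",
          "-job", "aggregate_stat",
          "-line_type", "MPR",
          "-out_line_type", "CNT",
          "-by", "FCST_VAR,FCST_LEV"]
   subprocess.run(cmd, check=True)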
From aadfb92bbea6b106f39162e3ba9d2198857bbf17 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Fri, 14 Apr 2023 12:44:00 -0600
Subject: [PATCH 66/81] Corrects RST error.

---
 docs/Users_Guide/appendixF.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst
index 4a3b56ed53..286cafa0e8 100644
--- a/docs/Users_Guide/appendixF.rst
+++ b/docs/Users_Guide/appendixF.rst
@@ -494,7 +494,7 @@ Python Script Requirements for MPR Data

 2. The **mpr_data** variable must be a Python list representation of a NumPy N-D Array created from a Pandas DataFrame

-3. The **mpr_data** variable must have data in **exactly** 36 columns, corresponding to the summation of the :ref:`_table_PS_header_info_point-stat_out`.
+3. The **mpr_data** variable must have data in **exactly** 36 columns, corresponding to the summation of the :ref:`_table_PS_header_info_point-stat_out` and the :ref:`_table_PS_format_info_MPR`.

 If a user does not have an existing MPR line type file created by the MET tools, they will need to map their data into the 36 columns expected by Stat-Analysis for the MPR line type data. If a user already has MPR line type files, the most direct way for a user to read MPR line type data is to model their Python script after the sample **read_ascii_mpr.py** script. Sample code is included here for convenience:

From e1796426ffd28bf88b1ea7e73b413dfa8b3e2098 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Fri, 14 Apr 2023 12:47:58 -0600
Subject: [PATCH 67/81] RST link errors.

---
 docs/Users_Guide/appendixF.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst
index 286cafa0e8..d9cf6bbd2d 100644
--- a/docs/Users_Guide/appendixF.rst
+++ b/docs/Users_Guide/appendixF.rst
@@ -494,7 +494,7 @@ Python Script Requirements for MPR Data

 2. The **mpr_data** variable must be a Python list representation of a NumPy N-D Array created from a Pandas DataFrame

-3. The **mpr_data** variable must have data in **exactly** 36 columns, corresponding to the summation of the :ref:`_table_PS_header_info_point-stat_out` and the :ref:`_table_PS_format_info_MPR`.
+3. The **mpr_data** variable must have data in **exactly** 36 columns, corresponding to the summation of the :ref:`common STAT output <table_PS_header_info_point-stat_out>` and the :ref:`MPR line type output <table_PS_format_info_MPR>`.

 If a user does not have an existing MPR line type file created by the MET tools, they will need to map their data into the 36 columns expected by Stat-Analysis for the MPR line type data. If a user already has MPR line type files, the most direct way for a user to read MPR line type data is to model their Python script after the sample **read_ascii_mpr.py** script. Sample code is included here for convenience:

From 47af55dd4c5051d7a527c41b7daf70f9c99ffde6 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 14 Apr 2023 15:57:09 -0600
Subject: [PATCH 68/81] #2474 Formatting (indent), Moved convert_point_data to
 under met_point_tools

---
 scripts/python/met/point.py | 1768 ++++++++++++++++++-----------------
 1 file changed, 887 insertions(+), 881 deletions(-)

diff --git a/scripts/python/met/point.py b/scripts/python/met/point.py
index 77bfcadd1f..16e0b6c047 100644
--- a/scripts/python/met/point.py
+++ b/scripts/python/met/point.py
@@ -55,923 +55,929 @@ def read_data(data_filename):
 COUNT_SHOW = 30

 class base_met_point_obs(object):
-   '''
-   classdocs
-   '''
-   ERROR_P = " ==PYTHON_ERROR=="
-   INFO_P = " ==PYTHON_INFO=="
-
-   python_prefix = 'PYTHON_POINT_USER'
-
-   FILL_VALUE = -9999.
- - def __init__(self, use_var_id=True): - ''' - Constructor - ''' - self.count_info = "" - self.input_name = None - self.ignore_input_file = False - self.use_var_id = use_var_id # True if variable index, False if GRIB code - self.error_msg = "" - self.has_error = False - - # Header - self.nhdr = 0 - self.npbhdr = 0 - self.nhdr_typ = 0 # type table - self.nhdr_sid = 0 # station_id table - self.nhdr_vld = 0 # valid time strings - self.hdr_typ = [] # (nhdr) integer - self.hdr_sid = [] # (nhdr) integer - self.hdr_vld = [] # (nhdr) integer - self.hdr_lat = [] # (nhdr) float - self.hdr_lon = [] # (nhdr) float - self.hdr_elv = [] # (nhdr) float - self.hdr_typ_table = [] # (nhdr_typ, mxstr2) string - self.hdr_sid_table = [] # (nhdr_sid, mxstr2) string - self.hdr_vld_table = [] # (nhdr_vld, mxstr) string - - #Observation data - self.nobs = 0 - self.nobs_qty = 0 - self.nobs_var = 0 - self.obs_qty = [] # (nobs_qty) integer, index of self.obs_qty_table - self.obs_hid = [] # (nobs) integer - self.obs_vid = [] # (nobs) integer, veriable index from self.obs_var_table or GRIB code - self.obs_lvl = [] # (nobs) float - self.obs_hgt = [] # (nobs) float - self.obs_val = [] # (nobs) float - self.obs_qty_table = [] # (nobs_qty, mxstr) string - self.obs_var_table = [] # (nobs_var, mxstr2) string, required if self.use_var_id is True - self.obs_var_unit = [] # (nobs_var, mxstr2) string, optional if self.use_var_id is True - self.obs_var_desc = [] # (nobs_var, mxstr3) string, optional if self.use_var_id is True - - # Optional variables for PREPBUFR, not supported yet - self.hdr_prpt_typ = [] # optional - self.hdr_irpt_typ = [] # optional - self.hdr_inst_typ = [] # optional - - def add_error_msg(self, error_msg): - self.has_error = True - self.log_error_msg(error_msg) - if 0 == len(self.error_msg): - self.error_msg = error_msg - else: - self.error_msg = "{m1}\n{m2}".format(m1=self.error_msg, m2=error_msg) - - def add_error_msgs(self, error_msgs): - self.has_error = True - for error_msg in error_msgs: - self.add_error_msg(error_msg) - - def check_data_member_float(self, local_var, var_name): - if 0 == len(local_var): - self.add_error_msg("{v} is empty (float)".format(v=var_name)) - elif isinstance(local_var, list): - if isinstance(local_var[0], str) and not self.is_number(local_var[0]): - self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int or float only)".format( - n=var_name, v=local_var[0])) - elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], (int, float)): - self.add_error_msg("Not supported data type ({t}) for {v}[0] (int or float only)".format( - v=var_name, t=type(local_var[0]))) - elif not self.is_numpy_array(local_var): - self.add_error_msg("Not supported data type ({t}) for {v} (list and numpy.ndarray)".format( - v=var_name, t=type(local_var))) - - def check_data_member_int(self, local_var, var_name): - if 0 == len(local_var): - self.add_error_msg("{v} is empty (int)".format(v=var_name)) - elif isinstance(local_var, list): - if isinstance(local_var[0], str) and not self.is_number(local_var[0]): - self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int only)".format( - n=var_name, v=local_var[0])) - elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], int): - self.add_error_msg("Not supported data type ({t}) for {v}[0] (int only)".format( - v=var_name, t=type(local_var[0]))) - elif not self.is_numpy_array(local_var): - self.add_error_msg("Not supported data type ({t}) for {v} (list and 
numpy.ndarray)".format( - v=var_name, t=type(local_var))) - - def check_data_member_string(self, local_var, var_name): - if 0 == len(local_var): - self.add_error_msg("{v} is empty (string)".format(v=var_name)) - elif not isinstance(local_var, (list)): - self.add_error_msg("Not supported data type ({t}) for {v} (list)".format( - v=var_name, t=type(local_var))) - - def check_point_data(self): - if not self.ignore_input_file and self.input_name is not None and not os.path.exists(self.input_name): - self.add_error_msg('The netcdf input {f} does not exist'.format(f=self.input_name)) - else: - self.check_data_member_int(self.hdr_typ,'hdr_typ') - self.check_data_member_int(self.hdr_sid,'hdr_sid') - self.check_data_member_int(self.hdr_vld,'hdr_vld') - self.check_data_member_float(self.hdr_lat,'hdr_lat') - self.check_data_member_float(self.hdr_lon,'hdr_lon') - self.check_data_member_float(self.hdr_elv,'hdr_elv') - self.check_data_member_string(self.hdr_typ_table,'hdr_typ_table') - self.check_data_member_string(self.hdr_sid_table,'hdr_sid_table') - self.check_data_member_string(self.hdr_vld_table,'hdr_vld_table') - - self.check_data_member_int(self.obs_qty,'obs_qty') - self.check_data_member_int(self.obs_hid,'obs_hid') - self.check_data_member_int(self.obs_vid,'obs_vid') - self.check_data_member_float(self.obs_lvl,'obs_lvl') - self.check_data_member_float(self.obs_hgt,'obs_hgt') - self.check_data_member_float(self.obs_val,'obs_val') - self.check_data_member_string(self.obs_qty_table,'obs_qty_table') - if self.use_var_id: - self.check_data_member_string(self.obs_var_table,'obs_var_table') - - def convert_to_numpy(self, value_list): - return np.array(value_list) - - def dump(self): - base_met_point_obs.print_point_data(self.get_point_data()) - - def get_count_string(self): - return f' nobs={self.nobs} nhdr={self.nhdr} ntyp={self.nhdr_typ} nsid={self.nhdr_sid} nvld={self.nhdr_vld} nqty={self.nobs_qty} nvar={self.nobs_var}' - - def get_point_data(self): - if self.nhdr <= 0: - self.nhdr = len(self.hdr_lat) - if self.nobs <= 0: - self.nobs = len(self.obs_val) - if self.nhdr_typ <= 0: - self.nhdr_typ = len(self.hdr_typ_table) - if self.nhdr_sid <= 0: - self.nhdr_sid = len(self.hdr_sid_table) - if self.nhdr_vld <= 0: - self.nhdr_vld = len(self.hdr_vld_table) - if self.npbhdr <= 0: - self.npbhdr = len(self.hdr_prpt_typ) - if self.nobs_qty <= 0: - self.nobs_qty = len(self.obs_qty_table) - if self.nobs_var <= 0: - self.nobs_var = len(self.obs_var_table) - self.check_point_data() - - if not self.is_numpy_array(self.hdr_typ): - self.hdr_typ = self.convert_to_numpy(self.hdr_typ) - if not self.is_numpy_array(self.hdr_sid): - self.hdr_sid = self.convert_to_numpy(self.hdr_sid) - if not self.is_numpy_array(self.hdr_vld): - self.hdr_vld = self.convert_to_numpy(self.hdr_vld) - if not self.is_numpy_array(self.hdr_lat): - self.hdr_lat = self.convert_to_numpy(self.hdr_lat) - if not self.is_numpy_array(self.hdr_lon): - self.hdr_lon = self.convert_to_numpy(self.hdr_lon) - if not self.is_numpy_array(self.hdr_elv): - self.hdr_elv = self.convert_to_numpy(self.hdr_elv) - - if not self.is_numpy_array(self.obs_qty): - self.obs_qty = self.convert_to_numpy(self.obs_qty) - if not self.is_numpy_array(self.obs_hid): - self.obs_hid = self.convert_to_numpy(self.obs_hid) - if not self.is_numpy_array(self.obs_vid): - self.obs_vid = self.convert_to_numpy(self.obs_vid) - if not self.is_numpy_array(self.obs_lvl): - self.obs_lvl = self.convert_to_numpy(self.obs_lvl) - if not self.is_numpy_array(self.obs_hgt): - self.obs_hgt = 
self.convert_to_numpy(self.obs_hgt) - if not self.is_numpy_array(self.obs_val): - self.obs_val = self.convert_to_numpy(self.obs_val) - - self.count_info = self.get_count_string() - self.met_point_data = self - return self.__dict__ - - def is_number(self, num_str): - return num_str.replace('-','1').replace('+','2').replace('.','3').isdigit() - - def is_numpy_array(self, var): - return isinstance(var, np.ndarray) - - def log_error_msg(self, err_msg): - base_met_point_obs.error_msg(err_msg) - - def log_error(self, err_msgs): - print(self.ERROR_P) - for err_line in err_msgs.split('\n'): - self.log_error_msg(err_line) - print(self.ERROR_P) - - def log_info(self, info_msg): - base_met_point_obs.info_msg(info_msg) - - def put_data(self, point_obs_dict): - self.use_var_id = point_obs_dict['use_var_id'] - self.hdr_typ = point_obs_dict['hdr_typ'] - self.hdr_sid = point_obs_dict['hdr_sid'] - self.hdr_vld = point_obs_dict['hdr_vld'] - self.hdr_lat = point_obs_dict['hdr_lat'] - self.hdr_lon = point_obs_dict['hdr_lon'] - self.hdr_elv = point_obs_dict['hdr_elv'] - self.hdr_typ_table = point_obs_dict['hdr_typ_table'] - self.hdr_sid_table = point_obs_dict['hdr_sid_table'] - self.hdr_vld_table = point_obs_dict['hdr_vld_table'] - - #Observation data - self.obs_qty = point_obs_dict['obs_qty'] - self.obs_hid = point_obs_dict['obs_hid'] - self.obs_lvl = point_obs_dict['obs_lvl'] - self.obs_hgt = point_obs_dict['obs_hgt'] - self.obs_val = point_obs_dict['obs_val'] - self.obs_vid = point_obs_dict['obs_vid'] - self.obs_var_table = point_obs_dict['obs_var_table'] - self.obs_qty_table = point_obs_dict['obs_qty_table'] - po_array = point_obs_dict.get('obs_unit', None) - if po_array is not None: - self.obs_var_unit = po_array - po_array = point_obs_dict.get('obs_desc', None) - if po_array is not None: - self.obs_var_desc = po_array - - po_array = point_obs_dict.get('hdr_prpt_typ', None) - if po_array is not None: - self.hdr_prpt_typ = po_array - po_array = point_obs_dict.get('hdr_irpt_typ', None) - if po_array is not None: - self.hdr_irpt_typ = po_array - po_array = point_obs_dict.get('hdr_inst_typ', None) - if po_array is not None: - self.hdr_inst_typ = po_array - - @staticmethod - def get_prompt(): - return " python:" - - @staticmethod - def error_msg(msg): - print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.ERROR_P} {msg}') - - @staticmethod - def info_msg(msg): - print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.INFO_P} {msg}') - - @staticmethod - def get_python_script(arg_value): - return arg_value[len(met_point_obs.python_prefix)+1:] - - @staticmethod - def is_python_script(arg_value): - return arg_value.startswith(met_point_obs.python_prefix) - - @staticmethod - def print_data(key, data_array, show_count=COUNT_SHOW): - if isinstance(data_array, list): - data_len = len(data_array) - if show_count >= data_len: - print(" {k:10s}: {v}".format(k=key, v= data_array)) - else: - end_offset = int(show_count/2) - print(" {k:10s}: count={v}".format(k=key, v=data_len)) - print(" {k:10s}[0:{o}] {v}".format(k=key, v=data_array[:end_offset], o=end_offset)) - print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v='...', s=end_offset+1, e=data_len-end_offset-1)) - print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v= data_array[-end_offset:], s=(data_len-end_offset), e=(data_len-1))) - else: - print(" {k:10s}: {v}".format(k=key, v= data_array)) - - @staticmethod - def print_point_data(met_point_data, print_subset=True): - print(' === MET point data by python embedding ===') - if print_subset: - 
met_point_obs.print_data('nhdr',met_point_data['nhdr']) - met_point_obs.print_data('nobs',met_point_data['nobs']) - met_point_obs.print_data('use_var_id',met_point_data['use_var_id']) - met_point_obs.print_data('hdr_typ',met_point_data['hdr_typ']) - met_point_obs.print_data('hdr_typ_table',met_point_data['hdr_typ_table']) - met_point_obs.print_data('hdr_sid',met_point_data['hdr_sid']) - met_point_obs.print_data('hdr_sid_table',met_point_data['hdr_sid_table']) - met_point_obs.print_data('hdr_vld',met_point_data['hdr_vld']) - met_point_obs.print_data('hdr_vld_table',met_point_data['hdr_vld_table']) - met_point_obs.print_data('hdr_lat',met_point_data['hdr_lat']) - met_point_obs.print_data('hdr_lon',met_point_data['hdr_lon']) - met_point_obs.print_data('hdr_elv',met_point_data['hdr_elv']) - met_point_obs.print_data('obs_hid',met_point_data['obs_hid']) - met_point_obs.print_data('obs_vid',met_point_data['obs_vid']) - met_point_obs.print_data('obs_var_table',met_point_data['obs_var_table']) - met_point_obs.print_data('obs_qty',met_point_data['obs_qty']) - met_point_obs.print_data('obs_qty_table',met_point_data['obs_qty_table']) - met_point_obs.print_data('obs_lvl',met_point_data['obs_lvl']) - met_point_obs.print_data('obs_hgt',met_point_data['obs_hgt']) - met_point_obs.print_data('obs_val',met_point_data['obs_val']) - else: - print('All',met_point_data) - print(" nhdr: ",met_point_data['nhdr']) - print(" nobs: ",met_point_data['nobs']) - print(' use_var_id: ',met_point_data['use_var_id']) - print(' hdr_typ: ',met_point_data['hdr_typ']) - print('hdr_typ_table: ',met_point_data['hdr_typ_table']) - print(' hdr_sid: ',met_point_data['hdr_sid']) - print('hdr_sid_table: ',met_point_data['hdr_sid_table']) - print(' hdr_vld: ',met_point_data['hdr_vld']) - print('hdr_vld_table: ',met_point_data['hdr_vld_table']) - print(' hdr_lat: ',met_point_data['hdr_lat']) - print(' hdr_lon: ',met_point_data['hdr_lon']) - print(' hdr_elv: ',met_point_data['hdr_elv']) - print(' obs_hid: ',met_point_data['obs_hid']) - print(' obs_vid: ',met_point_data['obs_vid']) - print('obs_var_table: ',met_point_data['obs_var_table']) - print(' obs_qty: ',met_point_data['obs_qty']) - print('obs_qty_table: ',met_point_data['obs_qty_table']) - print(' obs_lvl: ',met_point_data['obs_lvl']) - print(' obs_hgt: ',met_point_data['obs_hgt']) - print(' obs_val: ',met_point_data['obs_val']) - - print(' === MET point data by python embedding ===') + ''' + classdocs + ''' + ERROR_P = " ==PYTHON_ERROR==" + INFO_P = " ==PYTHON_INFO==" + + python_prefix = 'PYTHON_POINT_USER' + + FILL_VALUE = -9999. 
+ + def __init__(self, use_var_id=True): + ''' + Constructor + ''' + self.count_info = "" + self.input_name = None + self.ignore_input_file = False + self.use_var_id = use_var_id # True if variable index, False if GRIB code + self.error_msg = "" + self.has_error = False + + # Header + self.nhdr = 0 + self.npbhdr = 0 + self.nhdr_typ = 0 # type table + self.nhdr_sid = 0 # station_id table + self.nhdr_vld = 0 # valid time strings + self.hdr_typ = [] # (nhdr) integer + self.hdr_sid = [] # (nhdr) integer + self.hdr_vld = [] # (nhdr) integer + self.hdr_lat = [] # (nhdr) float + self.hdr_lon = [] # (nhdr) float + self.hdr_elv = [] # (nhdr) float + self.hdr_typ_table = [] # (nhdr_typ, mxstr2) string + self.hdr_sid_table = [] # (nhdr_sid, mxstr2) string + self.hdr_vld_table = [] # (nhdr_vld, mxstr) string + + #Observation data + self.nobs = 0 + self.nobs_qty = 0 + self.nobs_var = 0 + self.obs_qty = [] # (nobs_qty) integer, index of self.obs_qty_table + self.obs_hid = [] # (nobs) integer + self.obs_vid = [] # (nobs) integer, veriable index from self.obs_var_table or GRIB code + self.obs_lvl = [] # (nobs) float + self.obs_hgt = [] # (nobs) float + self.obs_val = [] # (nobs) float + self.obs_qty_table = [] # (nobs_qty, mxstr) string + self.obs_var_table = [] # (nobs_var, mxstr2) string, required if self.use_var_id is True + self.obs_var_unit = [] # (nobs_var, mxstr2) string, optional if self.use_var_id is True + self.obs_var_desc = [] # (nobs_var, mxstr3) string, optional if self.use_var_id is True + + # Optional variables for PREPBUFR, not supported yet + self.hdr_prpt_typ = [] # optional + self.hdr_irpt_typ = [] # optional + self.hdr_inst_typ = [] # optional + + def add_error_msg(self, error_msg): + self.has_error = True + self.log_error_msg(error_msg) + if 0 == len(self.error_msg): + self.error_msg = error_msg + else: + self.error_msg = "{m1}\n{m2}".format(m1=self.error_msg, m2=error_msg) + + def add_error_msgs(self, error_msgs): + self.has_error = True + for error_msg in error_msgs: + self.add_error_msg(error_msg) + + def check_data_member_float(self, local_var, var_name): + if 0 == len(local_var): + self.add_error_msg("{v} is empty (float)".format(v=var_name)) + elif isinstance(local_var, list): + if isinstance(local_var[0], str) and not self.is_number(local_var[0]): + self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int or float only)".format( + n=var_name, v=local_var[0])) + elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], (int, float)): + self.add_error_msg("Not supported data type ({t}) for {v}[0] (int or float only)".format( + v=var_name, t=type(local_var[0]))) + elif not self.is_numpy_array(local_var): + self.add_error_msg("Not supported data type ({t}) for {v} (list and numpy.ndarray)".format( + v=var_name, t=type(local_var))) + + def check_data_member_int(self, local_var, var_name): + if 0 == len(local_var): + self.add_error_msg("{v} is empty (int)".format(v=var_name)) + elif isinstance(local_var, list): + if isinstance(local_var[0], str) and not self.is_number(local_var[0]): + self.add_error_msg("Not supported data type: {n}[0]={v}, string type, not a number (int only)".format( + n=var_name, v=local_var[0])) + elif 0 > str(type(local_var[0])).find('numpy') and not isinstance(local_var[0], int): + self.add_error_msg("Not supported data type ({t}) for {v}[0] (int only)".format( + v=var_name, t=type(local_var[0]))) + elif not self.is_numpy_array(local_var): + self.add_error_msg("Not supported data type ({t}) for {v} (list and 
numpy.ndarray)".format( + v=var_name, t=type(local_var))) + + def check_data_member_string(self, local_var, var_name): + if 0 == len(local_var): + self.add_error_msg("{v} is empty (string)".format(v=var_name)) + elif not isinstance(local_var, (list)): + self.add_error_msg("Not supported data type ({t}) for {v} (list)".format( + v=var_name, t=type(local_var))) + + def check_point_data(self): + if not self.ignore_input_file and self.input_name is not None and not os.path.exists(self.input_name): + self.add_error_msg('The netcdf input {f} does not exist'.format(f=self.input_name)) + else: + self.check_data_member_int(self.hdr_typ,'hdr_typ') + self.check_data_member_int(self.hdr_sid,'hdr_sid') + self.check_data_member_int(self.hdr_vld,'hdr_vld') + self.check_data_member_float(self.hdr_lat,'hdr_lat') + self.check_data_member_float(self.hdr_lon,'hdr_lon') + self.check_data_member_float(self.hdr_elv,'hdr_elv') + self.check_data_member_string(self.hdr_typ_table,'hdr_typ_table') + self.check_data_member_string(self.hdr_sid_table,'hdr_sid_table') + self.check_data_member_string(self.hdr_vld_table,'hdr_vld_table') + + self.check_data_member_int(self.obs_qty,'obs_qty') + self.check_data_member_int(self.obs_hid,'obs_hid') + self.check_data_member_int(self.obs_vid,'obs_vid') + self.check_data_member_float(self.obs_lvl,'obs_lvl') + self.check_data_member_float(self.obs_hgt,'obs_hgt') + self.check_data_member_float(self.obs_val,'obs_val') + self.check_data_member_string(self.obs_qty_table,'obs_qty_table') + if self.use_var_id: + self.check_data_member_string(self.obs_var_table,'obs_var_table') + + def convert_to_numpy(self, value_list): + return np.array(value_list) + + def dump(self): + base_met_point_obs.print_point_data(self.get_point_data()) + + def get_count_string(self): + return f' nobs={self.nobs} nhdr={self.nhdr} ntyp={self.nhdr_typ} nsid={self.nhdr_sid} nvld={self.nhdr_vld} nqty={self.nobs_qty} nvar={self.nobs_var}' + + def get_point_data(self): + if self.nhdr <= 0: + self.nhdr = len(self.hdr_lat) + if self.nobs <= 0: + self.nobs = len(self.obs_val) + if self.nhdr_typ <= 0: + self.nhdr_typ = len(self.hdr_typ_table) + if self.nhdr_sid <= 0: + self.nhdr_sid = len(self.hdr_sid_table) + if self.nhdr_vld <= 0: + self.nhdr_vld = len(self.hdr_vld_table) + if self.npbhdr <= 0: + self.npbhdr = len(self.hdr_prpt_typ) + if self.nobs_qty <= 0: + self.nobs_qty = len(self.obs_qty_table) + if self.nobs_var <= 0: + self.nobs_var = len(self.obs_var_table) + self.check_point_data() + + if not self.is_numpy_array(self.hdr_typ): + self.hdr_typ = self.convert_to_numpy(self.hdr_typ) + if not self.is_numpy_array(self.hdr_sid): + self.hdr_sid = self.convert_to_numpy(self.hdr_sid) + if not self.is_numpy_array(self.hdr_vld): + self.hdr_vld = self.convert_to_numpy(self.hdr_vld) + if not self.is_numpy_array(self.hdr_lat): + self.hdr_lat = self.convert_to_numpy(self.hdr_lat) + if not self.is_numpy_array(self.hdr_lon): + self.hdr_lon = self.convert_to_numpy(self.hdr_lon) + if not self.is_numpy_array(self.hdr_elv): + self.hdr_elv = self.convert_to_numpy(self.hdr_elv) + + if not self.is_numpy_array(self.obs_qty): + self.obs_qty = self.convert_to_numpy(self.obs_qty) + if not self.is_numpy_array(self.obs_hid): + self.obs_hid = self.convert_to_numpy(self.obs_hid) + if not self.is_numpy_array(self.obs_vid): + self.obs_vid = self.convert_to_numpy(self.obs_vid) + if not self.is_numpy_array(self.obs_lvl): + self.obs_lvl = self.convert_to_numpy(self.obs_lvl) + if not self.is_numpy_array(self.obs_hgt): + self.obs_hgt = 
self.convert_to_numpy(self.obs_hgt) + if not self.is_numpy_array(self.obs_val): + self.obs_val = self.convert_to_numpy(self.obs_val) + + self.count_info = self.get_count_string() + self.met_point_data = self + return self.__dict__ + + def is_number(self, num_str): + return num_str.replace('-','1').replace('+','2').replace('.','3').isdigit() + + def is_numpy_array(self, var): + return isinstance(var, np.ndarray) + + def log_error_msg(self, err_msg): + base_met_point_obs.error_msg(err_msg) + + def log_error(self, err_msgs): + print(self.ERROR_P) + for err_line in err_msgs.split('\n'): + self.log_error_msg(err_line) + print(self.ERROR_P) + + def log_info(self, info_msg): + base_met_point_obs.info_msg(info_msg) + + def put_data(self, point_obs_dict): + self.use_var_id = point_obs_dict['use_var_id'] + self.hdr_typ = point_obs_dict['hdr_typ'] + self.hdr_sid = point_obs_dict['hdr_sid'] + self.hdr_vld = point_obs_dict['hdr_vld'] + self.hdr_lat = point_obs_dict['hdr_lat'] + self.hdr_lon = point_obs_dict['hdr_lon'] + self.hdr_elv = point_obs_dict['hdr_elv'] + self.hdr_typ_table = point_obs_dict['hdr_typ_table'] + self.hdr_sid_table = point_obs_dict['hdr_sid_table'] + self.hdr_vld_table = point_obs_dict['hdr_vld_table'] + + #Observation data + self.obs_qty = point_obs_dict['obs_qty'] + self.obs_hid = point_obs_dict['obs_hid'] + self.obs_lvl = point_obs_dict['obs_lvl'] + self.obs_hgt = point_obs_dict['obs_hgt'] + self.obs_val = point_obs_dict['obs_val'] + self.obs_vid = point_obs_dict['obs_vid'] + self.obs_var_table = point_obs_dict['obs_var_table'] + self.obs_qty_table = point_obs_dict['obs_qty_table'] + po_array = point_obs_dict.get('obs_unit', None) + if po_array is not None: + self.obs_var_unit = po_array + po_array = point_obs_dict.get('obs_desc', None) + if po_array is not None: + self.obs_var_desc = po_array + + po_array = point_obs_dict.get('hdr_prpt_typ', None) + if po_array is not None: + self.hdr_prpt_typ = po_array + po_array = point_obs_dict.get('hdr_irpt_typ', None) + if po_array is not None: + self.hdr_irpt_typ = po_array + po_array = point_obs_dict.get('hdr_inst_typ', None) + if po_array is not None: + self.hdr_inst_typ = po_array + + @staticmethod + def get_prompt(): + return " python:" + + @staticmethod + def error_msg(msg): + print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.ERROR_P} {msg}') + + @staticmethod + def info_msg(msg): + print(f'{base_met_point_obs.get_prompt()} {base_met_point_obs.INFO_P} {msg}') + + @staticmethod + def get_python_script(arg_value): + return arg_value[len(met_point_obs.python_prefix)+1:] + + @staticmethod + def is_python_script(arg_value): + return arg_value.startswith(met_point_obs.python_prefix) + + @staticmethod + def print_data(key, data_array, show_count=COUNT_SHOW): + if isinstance(data_array, list): + data_len = len(data_array) + if show_count >= data_len: + print(" {k:10s}: {v}".format(k=key, v= data_array)) + else: + end_offset = int(show_count/2) + print(" {k:10s}: count={v}".format(k=key, v=data_len)) + print(" {k:10s}[0:{o}] {v}".format(k=key, v=data_array[:end_offset], o=end_offset)) + print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v='...', s=end_offset+1, e=data_len-end_offset-1)) + print(" {k:10s}[{s}:{e}]: {v}".format(k=key, v= data_array[-end_offset:], s=(data_len-end_offset), e=(data_len-1))) + else: + print(" {k:10s}: {v}".format(k=key, v= data_array)) + + @staticmethod + def print_point_data(met_point_data, print_subset=True): + print(' === MET point data by python embedding ===') + if print_subset: + 
met_point_obs.print_data('nhdr',met_point_data['nhdr']) + met_point_obs.print_data('nobs',met_point_data['nobs']) + met_point_obs.print_data('use_var_id',met_point_data['use_var_id']) + met_point_obs.print_data('hdr_typ',met_point_data['hdr_typ']) + met_point_obs.print_data('hdr_typ_table',met_point_data['hdr_typ_table']) + met_point_obs.print_data('hdr_sid',met_point_data['hdr_sid']) + met_point_obs.print_data('hdr_sid_table',met_point_data['hdr_sid_table']) + met_point_obs.print_data('hdr_vld',met_point_data['hdr_vld']) + met_point_obs.print_data('hdr_vld_table',met_point_data['hdr_vld_table']) + met_point_obs.print_data('hdr_lat',met_point_data['hdr_lat']) + met_point_obs.print_data('hdr_lon',met_point_data['hdr_lon']) + met_point_obs.print_data('hdr_elv',met_point_data['hdr_elv']) + met_point_obs.print_data('obs_hid',met_point_data['obs_hid']) + met_point_obs.print_data('obs_vid',met_point_data['obs_vid']) + met_point_obs.print_data('obs_var_table',met_point_data['obs_var_table']) + met_point_obs.print_data('obs_qty',met_point_data['obs_qty']) + met_point_obs.print_data('obs_qty_table',met_point_data['obs_qty_table']) + met_point_obs.print_data('obs_lvl',met_point_data['obs_lvl']) + met_point_obs.print_data('obs_hgt',met_point_data['obs_hgt']) + met_point_obs.print_data('obs_val',met_point_data['obs_val']) + else: + print('All',met_point_data) + print(" nhdr: ",met_point_data['nhdr']) + print(" nobs: ",met_point_data['nobs']) + print(' use_var_id: ',met_point_data['use_var_id']) + print(' hdr_typ: ',met_point_data['hdr_typ']) + print('hdr_typ_table: ',met_point_data['hdr_typ_table']) + print(' hdr_sid: ',met_point_data['hdr_sid']) + print('hdr_sid_table: ',met_point_data['hdr_sid_table']) + print(' hdr_vld: ',met_point_data['hdr_vld']) + print('hdr_vld_table: ',met_point_data['hdr_vld_table']) + print(' hdr_lat: ',met_point_data['hdr_lat']) + print(' hdr_lon: ',met_point_data['hdr_lon']) + print(' hdr_elv: ',met_point_data['hdr_elv']) + print(' obs_hid: ',met_point_data['obs_hid']) + print(' obs_vid: ',met_point_data['obs_vid']) + print('obs_var_table: ',met_point_data['obs_var_table']) + print(' obs_qty: ',met_point_data['obs_qty']) + print('obs_qty_table: ',met_point_data['obs_qty_table']) + print(' obs_lvl: ',met_point_data['obs_lvl']) + print(' obs_hgt: ',met_point_data['obs_hgt']) + print(' obs_val: ',met_point_data['obs_val']) + + print(' === MET point data by python embedding ===') class csv_point_obs(base_met_point_obs): - def __init__(self, point_data): - self.point_data = point_data - super(csv_point_obs, self).__init__() - - self.obs_cnt = obs_cnt = len(point_data) - self.obs_qty = [ 0 for _ in range(0, obs_cnt) ] # (nobs_qty) integer, index of self.obs_qty_table - self.obs_hid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer - self.obs_vid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer, veriable index from self.obs_var_table or GRIB code - self.obs_lvl = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float - self.obs_hgt = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float - self.obs_val = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float - - self.convert_point_data() - - def check_csv_record(self, csv_point_data, index): - error_msgs = [] - # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] - # dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'} - if 11 > len(csv_point_data): - error_msgs.append("{i}-th data: missing columns. 
should be 11 columns, not {c} columns".format( - i=index, c=len(csv_point_data))) - elif 11 < len(csv_point_data): - print("{i}-th data: ignore after 11-th columns out of {c} columns".format( - i=index, c=len(csv_point_data))) - if not isinstance(csv_point_data[0], str): - error_msgs.append("{i}-th data: message_type is not string".format(i=index)) - if not isinstance(csv_point_data[1], str): - error_msgs.append("{i}-th data: station_id is not string".format(i=index)) - if not isinstance(csv_point_data[2], str): - error_msgs.append("{i}-th data: valid_time is not string".format(i=index)) - if isinstance(csv_point_data[3], str): - error_msgs.append("{i}-th data: latitude can not be a string".format(i=index)) - elif csv_point_data[3] < -90.0 or csv_point_data[3] > 90.0: - error_msgs.append("{i}-th data: latitude ({l}) is out of range".format(i=index, l=csv_point_data[3])) - if isinstance(csv_point_data[4], str): - error_msgs.append("{i}-th data: longitude can not be a string".format(i=index)) - elif csv_point_data[4] < -180.0 or csv_point_data[4] > 360.0: - error_msgs.append("{i}-th data: longitude ({l}) is out of range".format(i=index, l=csv_point_data[4])) - if not isinstance(csv_point_data[6], str): - error_msgs.append("{i}-th data: grib_code/var_name is not string".format(i=index)) - if not isinstance(csv_point_data[9], str): - error_msgs.append("{i}-th data: quality_mark is not string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[5]) - if is_string and not is_num: - error_msgs.append("{i}-th data: elevation: only NA is accepted as string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[7]) - if is_string and not is_num: - error_msgs.append("{i}-th data: obs_level: only NA is accepted as string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[8]) - if is_string and not is_num: - error_msgs.append("{i}-th data: obs_height: only NA is accepted as string".format(i=index)) - is_string, is_num = self.is_num_string(csv_point_data[10]) - if is_string and not is_num: - error_msgs.append("{i}-th data: obs_value: only NA is accepted as string".format(i=index)) - return error_msgs - - def check_csv_point_data(self, all_records=False): - if 0 == len(self.point_data): - self.add_error_msg("No data!") - elif all_records: - data_idx = 0 - for csv_point_data in self.point_data: - data_idx += 1 - error_messages = self.check_csv_record(csv_point_data, data_idx) - if len(error_messages) > 0: - self.add_error_msgs(error_messages) - else: - error_messages = self.check_csv_record(self.point_data[0], index=1) + def __init__(self, point_data): + self.point_data = point_data + super(csv_point_obs, self).__init__() + + self.obs_cnt = obs_cnt = len(point_data) + self.obs_qty = [ 0 for _ in range(0, obs_cnt) ] # (nobs_qty) integer, index of self.obs_qty_table + self.obs_hid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer + self.obs_vid = [ 0 for _ in range(0, obs_cnt) ] # (nobs) integer, veriable index from self.obs_var_table or GRIB code + self.obs_lvl = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float + self.obs_hgt = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float + self.obs_val = [ self.FILL_VALUE for _ in range(0, obs_cnt) ] # (nobs) float + + self.convert_point_data() + + def check_csv_record(self, csv_point_data, index): + error_msgs = [] + # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] + # dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'} + 
if 11 > len(csv_point_data): + error_msgs.append("{i}-th data: missing columns. should be 11 columns, not {c} columns".format( + i=index, c=len(csv_point_data))) + elif 11 < len(csv_point_data): + print("{i}-th data: ignore after 11-th columns out of {c} columns".format( + i=index, c=len(csv_point_data))) + if not isinstance(csv_point_data[0], str): + error_msgs.append("{i}-th data: message_type is not string".format(i=index)) + if not isinstance(csv_point_data[1], str): + error_msgs.append("{i}-th data: station_id is not string".format(i=index)) + if not isinstance(csv_point_data[2], str): + error_msgs.append("{i}-th data: valid_time is not string".format(i=index)) + if isinstance(csv_point_data[3], str): + error_msgs.append("{i}-th data: latitude can not be a string".format(i=index)) + elif csv_point_data[3] < -90.0 or csv_point_data[3] > 90.0: + error_msgs.append("{i}-th data: latitude ({l}) is out of range".format(i=index, l=csv_point_data[3])) + if isinstance(csv_point_data[4], str): + error_msgs.append("{i}-th data: longitude can not be a string".format(i=index)) + elif csv_point_data[4] < -180.0 or csv_point_data[4] > 360.0: + error_msgs.append("{i}-th data: longitude ({l}) is out of range".format(i=index, l=csv_point_data[4])) + if not isinstance(csv_point_data[6], str): + error_msgs.append("{i}-th data: grib_code/var_name is not string".format(i=index)) + if not isinstance(csv_point_data[9], str): + error_msgs.append("{i}-th data: quality_mark is not string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[5]) + if is_string and not is_num: + error_msgs.append("{i}-th data: elevation: only NA is accepted as string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[7]) + if is_string and not is_num: + error_msgs.append("{i}-th data: obs_level: only NA is accepted as string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[8]) + if is_string and not is_num: + error_msgs.append("{i}-th data: obs_height: only NA is accepted as string".format(i=index)) + is_string, is_num = self.is_num_string(csv_point_data[10]) + if is_string and not is_num: + error_msgs.append("{i}-th data: obs_value: only NA is accepted as string".format(i=index)) + return error_msgs + + def check_csv_point_data(self, all_records=False): + if 0 == len(self.point_data): + self.add_error_msg("No data!") + elif all_records: + data_idx = 0 + for csv_point_data in self.point_data: + data_idx += 1 + error_messages = self.check_csv_record(csv_point_data, data_idx) if len(error_messages) > 0: - self.add_error_msgs(error_messages) - if 1 < len(self.point_data): - error_messages = self.check_csv_record(self.point_data[-1], index=len(self.point_data)) - if len(error_messages) > 0: - self.add_error_msgs(error_messages) - - def convert_point_data(self): - hdr_cnt = hdr_typ_cnt = hdr_sid_cnt = hdr_vld_cnt = 0 - var_name_cnt = qc_cnt = 0 - - hdr_map = {} - hdr_typ_map = {} - hdr_sid_map = {} - hdr_vld_map = {} - obs_var_map = {} - obs_qty_map = {} - self.use_var_id = not self.is_grib_code() - - index = 0 - #names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] - for csv_point_record in self.point_data: - # Build header map. 
- hdr_typ_str = csv_point_record[0] - hdr_typ_idx = hdr_typ_map.get(hdr_typ_str,-1) - if hdr_typ_idx < 0: - hdr_typ_idx = hdr_typ_cnt - hdr_typ_map[hdr_typ_str] = hdr_typ_idx - hdr_typ_cnt += 1 - - hdr_sid_str = csv_point_record[1] - hdr_sid_idx = hdr_sid_map.get(hdr_sid_str,-1) - if hdr_sid_idx < 0: - hdr_sid_idx = hdr_sid_cnt - hdr_sid_map[hdr_sid_str] = hdr_sid_idx - hdr_sid_cnt += 1 - - hdr_vld_str = csv_point_record[2] - hdr_vld_idx = hdr_vld_map.get(hdr_vld_str,-1) - if hdr_vld_idx < 0: - hdr_vld_idx = hdr_vld_cnt - hdr_vld_map[hdr_vld_str] = hdr_vld_idx - hdr_vld_cnt += 1 - - lat = csv_point_record[3] - lon = csv_point_record[4] - elv = self.get_num_value(csv_point_record[5] ) - hdr_key = (hdr_typ_idx,hdr_sid_idx,hdr_vld_idx,lat,lon,elv) - hdr_idx = hdr_map.get(hdr_key,-1) - if hdr_idx < 0: - hdr_idx = hdr_cnt - hdr_map[hdr_key] = hdr_idx - hdr_cnt += 1 - - var_id_str = csv_point_record[6] - if self.use_var_id: - var_id = obs_var_map.get(var_id_str,-1) - if var_id < 0: - var_id = var_name_cnt - obs_var_map[var_id_str] = var_id - var_name_cnt += 1 - else: - var_id = int(var_id_str) - - qc_str = csv_point_record[9] - qc_id = obs_qty_map.get(qc_str,-1) - if qc_id < 0: - qc_id = qc_cnt - obs_qty_map[qc_str] = qc_id - qc_cnt += 1 - - # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] - self.obs_vid[index] = var_id - self.obs_hid[index] = hdr_idx - self.obs_lvl[index] = self.get_num_value(csv_point_record[7]) - self.obs_hgt[index] = self.get_num_value(csv_point_record[8]) - self.obs_val[index] = self.get_num_value(csv_point_record[10]) - self.obs_qty[index] = qc_id - - index += 1 - - self.nhdr = hdr_cnt - self.nhdr_typ = hdr_typ_cnt - self.nhdr_sid = hdr_sid_cnt - self.nhdr_vld = hdr_vld_cnt - self.nobs_var = var_name_cnt - self.nobs_qty = qc_cnt - - # Fill header array and table array based on the map - self.hdr_typ = [ 0 for _ in range(0, hdr_cnt) ] - self.hdr_sid = [ 0 for _ in range(0, hdr_cnt) ] - self.hdr_vld = [ 0 for _ in range(0, hdr_cnt) ] - self.hdr_lat = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] - self.hdr_lon = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] - self.hdr_elv = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] - for key, idx in hdr_map.items(): - self.hdr_typ[idx] = key[0] - self.hdr_sid[idx] = key[1] - self.hdr_vld[idx] = key[2] - self.hdr_lat[idx] = key[3] - self.hdr_lon[idx] = key[4] - self.hdr_elv[idx] = key[5] - - self.hdr_typ_table = [ "" for _ in range(0, hdr_typ_cnt) ] - self.hdr_sid_table = [ "" for _ in range(0, hdr_sid_cnt) ] - self.hdr_vld_table = [ "" for _ in range(0, hdr_vld_cnt) ] - self.obs_qty_table = [ "" for _ in range(0, qc_cnt) ] - self.obs_var_table = [ "" for _ in range(0, var_name_cnt) ] - for key, idx in hdr_typ_map.items(): - self.hdr_typ_table[idx] = key - for key, idx in hdr_sid_map.items(): - self.hdr_sid_table[idx] = key - for key, idx in hdr_vld_map.items(): - self.hdr_vld_table[idx] = key - for key, idx in obs_qty_map.items(): - self.obs_qty_table[idx] = key - for key, idx in obs_var_map.items(): - self.obs_var_table[idx] = key - - def get_num_value(self, column_value): - num_value = column_value - if isinstance(column_value, str): - if self.is_number(column_value): - num_value = float(column_value) - else: - num_value = self.FILL_VALUE - if column_value.lower() != 'na' and column_value.lower() != 'n/a': - self.log_info(f'{column_value} is not a number, converted to the missing value') - return num_value - - def is_grib_code(self): - grib_code = True - for _point_data in self.point_data: - if 
isinstance(_point_data[6], int): - continue - elif isinstance(_point_data[6], str) and not _point_data[6].isdecimal(): - grib_code = False - break; - return grib_code - - def is_num_string(self, column_value): - is_string = isinstance(column_value, str) - if is_string: - is_num = True if self.is_number(column_value) or column_value.lower() == 'na' or column_value.lower() == 'n/a' else False - else: - is_num = True - return is_string, is_num + self.add_error_msgs(error_messages) + else: + error_messages = self.check_csv_record(self.point_data[0], index=1) + if len(error_messages) > 0: + self.add_error_msgs(error_messages) + if 1 < len(self.point_data): + error_messages = self.check_csv_record(self.point_data[-1], index=len(self.point_data)) + if len(error_messages) > 0: + self.add_error_msgs(error_messages) + + def convert_point_data(self): + hdr_cnt = hdr_typ_cnt = hdr_sid_cnt = hdr_vld_cnt = 0 + var_name_cnt = qc_cnt = 0 + + hdr_map = {} + hdr_typ_map = {} + hdr_sid_map = {} + hdr_vld_map = {} + obs_var_map = {} + obs_qty_map = {} + self.use_var_id = not self.is_grib_code() + + index = 0 + #names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] + for csv_point_record in self.point_data: + # Build header map. + hdr_typ_str = csv_point_record[0] + hdr_typ_idx = hdr_typ_map.get(hdr_typ_str,-1) + if hdr_typ_idx < 0: + hdr_typ_idx = hdr_typ_cnt + hdr_typ_map[hdr_typ_str] = hdr_typ_idx + hdr_typ_cnt += 1 + + hdr_sid_str = csv_point_record[1] + hdr_sid_idx = hdr_sid_map.get(hdr_sid_str,-1) + if hdr_sid_idx < 0: + hdr_sid_idx = hdr_sid_cnt + hdr_sid_map[hdr_sid_str] = hdr_sid_idx + hdr_sid_cnt += 1 + + hdr_vld_str = csv_point_record[2] + hdr_vld_idx = hdr_vld_map.get(hdr_vld_str,-1) + if hdr_vld_idx < 0: + hdr_vld_idx = hdr_vld_cnt + hdr_vld_map[hdr_vld_str] = hdr_vld_idx + hdr_vld_cnt += 1 + + lat = csv_point_record[3] + lon = csv_point_record[4] + elv = self.get_num_value(csv_point_record[5] ) + hdr_key = (hdr_typ_idx,hdr_sid_idx,hdr_vld_idx,lat,lon,elv) + hdr_idx = hdr_map.get(hdr_key,-1) + if hdr_idx < 0: + hdr_idx = hdr_cnt + hdr_map[hdr_key] = hdr_idx + hdr_cnt += 1 + + var_id_str = csv_point_record[6] + if self.use_var_id: + var_id = obs_var_map.get(var_id_str,-1) + if var_id < 0: + var_id = var_name_cnt + obs_var_map[var_id_str] = var_id + var_name_cnt += 1 + else: + var_id = int(var_id_str) + + qc_str = csv_point_record[9] + qc_id = obs_qty_map.get(qc_str,-1) + if qc_id < 0: + qc_id = qc_cnt + obs_qty_map[qc_str] = qc_id + qc_cnt += 1 + + # names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'] + self.obs_vid[index] = var_id + self.obs_hid[index] = hdr_idx + self.obs_lvl[index] = self.get_num_value(csv_point_record[7]) + self.obs_hgt[index] = self.get_num_value(csv_point_record[8]) + self.obs_val[index] = self.get_num_value(csv_point_record[10]) + self.obs_qty[index] = qc_id + + index += 1 + + self.nhdr = hdr_cnt + self.nhdr_typ = hdr_typ_cnt + self.nhdr_sid = hdr_sid_cnt + self.nhdr_vld = hdr_vld_cnt + self.nobs_var = var_name_cnt + self.nobs_qty = qc_cnt + + # Fill header array and table array based on the map + self.hdr_typ = [ 0 for _ in range(0, hdr_cnt) ] + self.hdr_sid = [ 0 for _ in range(0, hdr_cnt) ] + self.hdr_vld = [ 0 for _ in range(0, hdr_cnt) ] + self.hdr_lat = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] + self.hdr_lon = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] + self.hdr_elv = [ self.FILL_VALUE for _ in range(0, hdr_cnt) ] + for key, idx in hdr_map.items(): + self.hdr_typ[idx] = key[0] + self.hdr_sid[idx] = 
key[1] + self.hdr_vld[idx] = key[2] + self.hdr_lat[idx] = key[3] + self.hdr_lon[idx] = key[4] + self.hdr_elv[idx] = key[5] + + self.hdr_typ_table = [ "" for _ in range(0, hdr_typ_cnt) ] + self.hdr_sid_table = [ "" for _ in range(0, hdr_sid_cnt) ] + self.hdr_vld_table = [ "" for _ in range(0, hdr_vld_cnt) ] + self.obs_qty_table = [ "" for _ in range(0, qc_cnt) ] + self.obs_var_table = [ "" for _ in range(0, var_name_cnt) ] + for key, idx in hdr_typ_map.items(): + self.hdr_typ_table[idx] = key + for key, idx in hdr_sid_map.items(): + self.hdr_sid_table[idx] = key + for key, idx in hdr_vld_map.items(): + self.hdr_vld_table[idx] = key + for key, idx in obs_qty_map.items(): + self.obs_qty_table[idx] = key + for key, idx in obs_var_map.items(): + self.obs_var_table[idx] = key + + def get_num_value(self, column_value): + num_value = column_value + if isinstance(column_value, str): + if self.is_number(column_value): + num_value = float(column_value) + else: + num_value = self.FILL_VALUE + if column_value.lower() != 'na' and column_value.lower() != 'n/a': + self.log_info(f'{column_value} is not a number, converted to the missing value') + return num_value + + def is_grib_code(self): + grib_code = True + for _point_data in self.point_data: + if isinstance(_point_data[6], int): + continue + elif isinstance(_point_data[6], str) and not _point_data[6].isdecimal(): + grib_code = False + break; + return grib_code + + def is_num_string(self, column_value): + is_string = isinstance(column_value, str) + if is_string: + is_num = True if self.is_number(column_value) or column_value.lower() == 'na' or column_value.lower() == 'n/a' else False + else: + is_num = True + return is_string, is_num class met_point_obs(ABC, base_met_point_obs): - MET_ENV_RUN = 'MET_FORCE_TO_RUN' - - @abstractmethod - def read_data(self, args): - # args can be input_file_name, list, or dictionary - # - The variables at __init__ should be filled as python list or numpy array - # - set self.input_name - # - # Here is a template - ''' - if isinstance(args, dict): - in_filename = args.get('in_name',None) - elif isinstance(args, list): - in_filename = args[0] - else: - in_filename = args - self.input_name = in_filename - ''' - pass + MET_ENV_RUN = 'MET_FORCE_TO_RUN' + + @abstractmethod + def read_data(self, args): + # args can be input_file_name, list, or dictionary + # - The variables at __init__ should be filled as python list or numpy array + # - set self.input_name + # + # Here is a template + ''' + if isinstance(args, dict): + in_filename = args.get('in_name',None) + elif isinstance(args, list): + in_filename = args[0] + else: + in_filename = args + self.input_name = in_filename + ''' + pass class met_point_tools(): - @staticmethod - def convert_point_data(point_data, check_all_records=False, input_type='csv'): - convert_point_data(point_data, check_all_records, input_type) - - @staticmethod - def get_prompt(): - return " python:" - - @staticmethod - def get_nc_point_obs(): - return nc_point_obs() - - @staticmethod - def get_sample_point_obs(): - return sample_met_point_obs() - - @staticmethod - def is_python_prefix(user_cmd): - return user_cmd.startswith(base_met_point_obs.python_prefix) - - @staticmethod - # Read the input file which is 11 column text file as the first argument - def read_text_point_obs(input_file, header=None, - delim_whitespace=True, keep_default_na=False): - # Read and format the input 11-column observations: - # (1) string: Message_Type - # (2) string: Station_ID - # (3) string: Valid_Time(YYYYMMDD_HHMMSS) - # 
(4) numeric: Lat(Deg North)
-    #   (5) numeric: Lon(Deg East)
-    #   (6) numeric: Elevation(msl)
-    #   (7) string:  Var_Name(or GRIB_Code)
-    #   (8) numeric: Level
-    #   (9) numeric: Height(msl or agl)
-    #  (10) string:  QC_String
-    #  (11) numeric: Observation_Value
-    ascii_point_data = pd.read_csv(input_file, header=header,
-                                   delim_whitespace=delim_whitespace,
-                                   keep_default_na=keep_default_na,
-                                   names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'],
-                                   dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist()
-    return ascii_point_data
-
+    @staticmethod
+    def convert_point_data(point_data, check_all_records=False, input_type='csv'):
+        tmp_point_data = {}
+        if 'csv' == input_type:
+            csv_point_data = csv_point_obs(point_data)
+            csv_point_data.check_csv_point_data(check_all_records)
+            tmp_point_data = csv_point_data.get_point_data()
+        else:
+            base_met_point_obs.error_msg(f'Not supported input type: {input_type}')
+        return tmp_point_data
+
+    @staticmethod
+    def get_prompt():
+        return " python:"
+
+    @staticmethod
+    def get_nc_point_obs():
+        return nc_point_obs()
+
+    @staticmethod
+    def get_sample_point_obs():
+        return sample_met_point_obs()
+
+    @staticmethod
+    def is_python_prefix(user_cmd):
+        return user_cmd.startswith(base_met_point_obs.python_prefix)
+
+    @staticmethod
+    # Read the input file, which is an 11-column text file, as the first argument
+    def read_text_point_obs(input_file, header=None,
+                            delim_whitespace=True, keep_default_na=False):
+        # Read and format the input 11-column observations:
+        #   (1) string:  Message_Type
+        #   (2) string:  Station_ID
+        #   (3) string:  Valid_Time(YYYYMMDD_HHMMSS)
+        #   (4) numeric: Lat(Deg North)
+        #   (5) numeric: Lon(Deg East)
+        #   (6) numeric: Elevation(msl)
+        #   (7) string:  Var_Name(or GRIB_Code)
+        #   (8) numeric: Level
+        #   (9) numeric: Height(msl or agl)
+        #  (10) string:  QC_String
+        #  (11) numeric: Observation_Value
+        ascii_point_data = pd.read_csv(input_file, header=header,
+                                       delim_whitespace=delim_whitespace,
+                                       keep_default_na=keep_default_na,
+                                       names=['typ', 'sid', 'vld', 'lat', 'lon', 'elv', 'var', 'lvl', 'hgt', 'qc', 'obs'],
+                                       dtype={'typ':'str', 'sid':'str', 'vld':'str', 'var':'str', 'qc':'str'}).values.tolist()
+        return ascii_point_data
 
 # Note: caller should import netCDF4
-# the argements nc_group(dataset) and nc_var should not be None
+# The arguments nc_group(dataset) and nc_var should not be None
 class nc_tools():
-   met_missing = -99999999.
+    met_missing = -99999999.
- @staticmethod - def get_num_array(nc_group, var_name): - nc_var = nc_group.variables.get(var_name, None) - return [] if nc_var is None else nc_var[:] + @staticmethod + def get_num_array(nc_group, var_name): + nc_var = nc_group.variables.get(var_name, None) + return [] if nc_var is None else nc_var[:] - @staticmethod - def get_ncbyte_array_to_str(nc_var): - nc_str_data = nc_var[:] - if nc_var.datatype.name == 'bytes8': - nc_str_data = [ str(s.compressed(),"utf-8") for s in nc_var[:] ] - return nc_str_data + @staticmethod + def get_ncbyte_array_to_str(nc_var): + nc_str_data = nc_var[:] + if nc_var.datatype.name == 'bytes8': + nc_str_data = [ str(s.compressed(),"utf-8") for s in nc_var[:] ] + return nc_str_data - @staticmethod - def get_string_array(nc_group, var_name): - nc_var = nc_group.variables.get(var_name, None) - return [] if nc_var is None else nc_tools.get_ncbyte_array_to_str(nc_var) + @staticmethod + def get_string_array(nc_group, var_name): + nc_var = nc_group.variables.get(var_name, None) + return [] if nc_var is None else nc_tools.get_ncbyte_array_to_str(nc_var) class nc_point_obs(met_point_obs): - # args should be string, list, or dictionary - def get_nc_filename(self, args): - nc_filename = None - if isinstance(args, dict): - nc_filename = args.get('nc_name',None) - elif isinstance(args, list): - nc_filename = args[0] - elif args != ARG_PRINT_DATA: - nc_filename = args - - return nc_filename - - def read_data(self, nc_filename): - if nc_filename is None: - self.log_error_msg("The input NetCDF filename is missing") - elif not os.path.exists(nc_filename): - self.log_error_msg(f"input NetCDF file ({nc_filename}) does not exist") - else: - dataset = nc.Dataset(nc_filename, 'r') - - attr_name = 'use_var_id' - use_var_id_str = dataset.getncattr(attr_name) if attr_name in dataset.ncattrs() else "false" - self.use_var_id = use_var_id_str.lower() == 'true' - - # Header - self.hdr_typ = dataset['hdr_typ'][:] - self.hdr_sid = dataset['hdr_sid'][:] - self.hdr_vld = dataset['hdr_vld'][:] - self.hdr_lat = dataset['hdr_lat'][:] - self.hdr_lon = dataset['hdr_lon'][:] - self.hdr_elv = dataset['hdr_elv'][:] - self.hdr_typ_table = nc_tools.get_string_array(dataset, 'hdr_typ_table') - self.hdr_sid_table = nc_tools.get_string_array(dataset, 'hdr_sid_table') - self.hdr_vld_table = nc_tools.get_string_array(dataset, 'hdr_vld_table') - - nc_var = dataset.variables.get('obs_unit', None) - if nc_var: - self.obs_var_unit = nc_var[:] - nc_var = dataset.variables.get('obs_desc', None) - if nc_var: - self.obs_var_desc = nc_var[:] - - nc_var = dataset.variables.get('hdr_prpt_typ', None) - if nc_var: - self.hdr_prpt_typ = nc_var[:] - nc_var = dataset.variables.get('hdr_irpt_typ', None) - if nc_var: - self.hdr_irpt_typ = nc_var[:] - nc_var = dataset.variables.get('hdr_inst_typ', None) - if nc_var: - self.hdr_inst_typ =nc_var[:] - - #Observation data - self.hdr_sid = dataset['hdr_sid'][:] - self.obs_qty = np.array(dataset['obs_qty'][:]) - self.obs_hid = np.array(dataset['obs_hid'][:]) - self.obs_lvl = np.array(dataset['obs_lvl'][:]) - self.obs_hgt = np.array(dataset['obs_hgt'][:]) - self.obs_val = np.array(dataset['obs_val'][:]) - nc_var = dataset.variables.get('obs_vid', None) - if nc_var is None: - self.use_var_id = False - nc_var = dataset.variables.get('obs_gc', None) - else: - self.obs_var_table = nc_tools.get_string_array(dataset, 'obs_var') - if nc_var: - self.obs_vid = np.array(nc_var[:]) - - self.obs_qty_table = nc_tools.get_string_array(dataset, 'obs_qty_table') - - def save_ncfile(self, 
nc_filename): - met_data = self.get_point_data() - with nc.Dataset(nc_filename, 'w') as nc_dataset: - self.set_nc_data(nc_dataset) - return met_data - - def set_nc_data(self, nc_dataset): - return nc_point_obs.write_nc_data(nc_dataset, self) - - @staticmethod - def write_nc_file(nc_filename, point_obs): - with nc.Dataset(nc_filename, 'w') as nc_dataset: - nc_point_obs.set_nc_data(nc_dataset, point_obs) - - @staticmethod - def write_nc_data(nc_dataset, point_obs): - do_nothing = False - if 0 == point_obs.nhdr: - do_nothing = True - base_met_point_obs.info_msg("the header is empty") - if 0 == point_obs.nobs: - do_nothing = True - base_met_point_obs.info_msg("the observation data is empty") - if do_nothing: - print() - return - - # Set global attributes - nc_dataset.MET_Obs_version = "1.02" ; - nc_dataset.use_var_id = "true" if point_obs.use_var_id else "false" - - # Create dimensions - nc_dataset.createDimension('mxstr', 16) - nc_dataset.createDimension('mxstr2', 40) - nc_dataset.createDimension('mxstr3', 80) - nc_dataset.createDimension('nhdr', point_obs.nhdr) - nc_dataset.createDimension('nobs', point_obs.nobs) - #npbhdr = len(point_obs.hdr_prpt_typ) - if 0 < point_obs.npbhdr: - nc_dataset.createDimension('npbhdr', point_obs.npbhdr) - nc_dataset.createDimension('nhdr_typ', point_obs.nhdr_typ) - nc_dataset.createDimension('nhdr_sid', point_obs.nhdr_sid) - nc_dataset.createDimension('nhdr_vld', point_obs.nhdr_vld) - nc_dataset.createDimension('nobs_qty', point_obs.nobs_qty) - nc_dataset.createDimension('obs_var_num', point_obs.nobs_var) - - type_for_string = 'S1' # np.byte - dims_hdr = ('nhdr',) - dims_obs = ('nobs',) - - # Create header and observation variables - var_hdr_typ = nc_dataset.createVariable('hdr_typ', np.int32, dims_hdr, fill_value=-9999) - var_hdr_sid = nc_dataset.createVariable('hdr_sid', np.int32, dims_hdr, fill_value=-9999) - var_hdr_vld = nc_dataset.createVariable('hdr_vld', np.int32, dims_hdr, fill_value=-9999) - var_hdr_lat = nc_dataset.createVariable('hdr_lat', np.float32, dims_hdr, fill_value=-9999.) - var_hdr_lon = nc_dataset.createVariable('hdr_lon', np.float32, dims_hdr, fill_value=-9999.) - var_hdr_elv = nc_dataset.createVariable('hdr_elv', np.float32, dims_hdr, fill_value=-9999.) - - var_obs_qty = nc_dataset.createVariable('obs_qty', np.int32, dims_obs, fill_value=-9999) - var_obs_hid = nc_dataset.createVariable('obs_hid', np.int32, dims_obs, fill_value=-9999) - var_obs_vid = nc_dataset.createVariable('obs_vid', np.int32, dims_obs, fill_value=-9999) - var_obs_lvl = nc_dataset.createVariable('obs_lvl', np.float32, dims_obs, fill_value=-9999.) - var_obs_hgt = nc_dataset.createVariable('obs_hgt', np.float32, dims_obs, fill_value=-9999.) - var_obs_val = nc_dataset.createVariable('obs_val', np.float32, dims_obs, fill_value=-9999.) - - if 0 == point_obs.npbhdr: - var_hdr_prpt_typ = None - var_hdr_irpt_typ = None - var_hdr_inst_typ = None - else: - dims_npbhdr = ('npbhdr',) - var_hdr_prpt_typ = nc_dataset.createVariable('hdr_prpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) - var_hdr_irpt_typ = nc_dataset.createVariable('hdr_irpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) - var_hdr_inst_typ = nc_dataset.createVariable('hdr_inst_typ', np.int32, dims_npbhdr, fill_value=-9999.) 
- - var_hdr_typ_table = nc_dataset.createVariable('hdr_typ_table', type_for_string, ('nhdr_typ','mxstr2')) - var_hdr_sid_table = nc_dataset.createVariable('hdr_sid_table', type_for_string, ('nhdr_sid','mxstr2')) - var_hdr_vld_table = nc_dataset.createVariable('hdr_vld_table', type_for_string, ('nhdr_vld','mxstr')) - var_obs_qty_table = nc_dataset.createVariable('obs_qty_table', type_for_string, ('nobs_qty','mxstr')) - var_obs_var_table = nc_dataset.createVariable('obs_var', type_for_string, ('obs_var_num','mxstr2')) - var_obs_var_unit = nc_dataset.createVariable('obs_unit', type_for_string, ('obs_var_num','mxstr2')) - var_obs_var_desc = nc_dataset.createVariable('obs_desc', type_for_string, ('obs_var_num','mxstr3')) - - # Set variables - var_hdr_typ[:] = point_obs.hdr_typ[:] - var_hdr_sid[:] = point_obs.hdr_sid[:] - var_hdr_vld[:] = point_obs.hdr_vld[:] - var_hdr_lat[:] = point_obs.hdr_lat[:] - var_hdr_lon[:] = point_obs.hdr_lon[:] - var_hdr_elv[:] = point_obs.hdr_elv[:] - for i in range(0, point_obs.nhdr_typ): - for j in range(0, len(point_obs.hdr_typ_table[i])): - var_hdr_typ_table[i,j] = point_obs.hdr_typ_table[i][j] - for i in range(0, point_obs.nhdr_sid): - for j in range(0, len(point_obs.hdr_sid_table[i])): - var_hdr_sid_table[i,j] = point_obs.hdr_sid_table[i][j] - for i in range(0, point_obs.nhdr_vld): - for j in range(0, len(point_obs.hdr_vld_table[i])): - var_hdr_vld_table[i,j] = point_obs.hdr_vld_table[i][j] - if 0 < point_obs.npbhdr: - var_hdr_prpt_typ[:] = point_obs.hdr_prpt_typ[:] - var_hdr_irpt_typ[:] = point_obs.hdr_irpt_typ[:] - var_hdr_inst_typ[:] = point_obs.hdr_inst_typ[:] - - var_obs_qty[:] = point_obs.obs_qty[:] - var_obs_hid[:] = point_obs.obs_hid[:] - var_obs_vid[:] = point_obs.obs_vid[:] - var_obs_lvl[:] = point_obs.obs_lvl[:] - var_obs_hgt[:] = point_obs.obs_hgt[:] - var_obs_val[:] = point_obs.obs_val[:] - for i in range(0, point_obs.nobs_var): - for j in range(0, len(point_obs.obs_var_table[i])): - var_obs_var_table[i,j] = point_obs.obs_var_table[i][j] - var_obs_var_unit[i] = "" if i >= len(point_obs.obs_var_unit) else point_obs.obs_var_unit[i] - var_obs_var_desc[i] = "" if i >= len(point_obs.obs_var_desc) else point_obs.obs_var_desc[i] - for i in range(0, point_obs.nobs_qty): - for j in range(0, len(point_obs.obs_qty_table[i])): - var_obs_qty_table[i,j] = point_obs.obs_qty_table[i][j] - - # Set variable attributes - var_hdr_typ.long_name = "index of message type" - var_hdr_sid.long_name = "index of station identification" - var_hdr_vld.long_name = "index of valid time" - var_hdr_lat.long_name = "latitude" - var_hdr_lat.units = "degrees_north" - var_hdr_lon.long_name = "longitude" - var_hdr_lon.units = "degrees_east" - var_hdr_elv.long_name = "elevation" - var_hdr_elv.units = "meters above sea level (msl)" - - var_obs_qty.long_name = "index of quality flag" - var_obs_hid.long_name = "index of matching header data" - var_obs_vid.long_name = "index of BUFR variable corresponding to the observation type" - var_obs_lvl.long_name = "pressure level (hPa) or accumulation interval (sec)" - var_obs_hgt.long_name = "height in meters above sea level (msl)" - var_obs_val.long_name = "observation value" - var_hdr_typ_table.long_name = "message type" - var_hdr_sid_table.long_name = "station identification" - var_hdr_vld_table.long_name = "valid time" - var_hdr_vld_table.units = "YYYYMMDD_HHMMSS UTC" - var_obs_qty_table.long_name = "quality flag" - var_obs_var_table.long_name = "variable names" - var_obs_var_unit.long_name = "variable units" - var_obs_var_desc.long_name = 
"variable descriptions" + # args should be string, list, or dictionary + def get_nc_filename(self, args): + nc_filename = None + if isinstance(args, dict): + nc_filename = args.get('nc_name',None) + elif isinstance(args, list): + nc_filename = args[0] + elif args != ARG_PRINT_DATA: + nc_filename = args + + return nc_filename + + def read_data(self, nc_filename): + if nc_filename is None: + self.log_error_msg("The input NetCDF filename is missing") + elif not os.path.exists(nc_filename): + self.log_error_msg(f"input NetCDF file ({nc_filename}) does not exist") + else: + dataset = nc.Dataset(nc_filename, 'r') + + attr_name = 'use_var_id' + use_var_id_str = dataset.getncattr(attr_name) if attr_name in dataset.ncattrs() else "false" + self.use_var_id = use_var_id_str.lower() == 'true' + + # Header + self.hdr_typ = dataset['hdr_typ'][:] + self.hdr_sid = dataset['hdr_sid'][:] + self.hdr_vld = dataset['hdr_vld'][:] + self.hdr_lat = dataset['hdr_lat'][:] + self.hdr_lon = dataset['hdr_lon'][:] + self.hdr_elv = dataset['hdr_elv'][:] + self.hdr_typ_table = nc_tools.get_string_array(dataset, 'hdr_typ_table') + self.hdr_sid_table = nc_tools.get_string_array(dataset, 'hdr_sid_table') + self.hdr_vld_table = nc_tools.get_string_array(dataset, 'hdr_vld_table') + + nc_var = dataset.variables.get('obs_unit', None) + if nc_var: + self.obs_var_unit = nc_var[:] + nc_var = dataset.variables.get('obs_desc', None) + if nc_var: + self.obs_var_desc = nc_var[:] + + nc_var = dataset.variables.get('hdr_prpt_typ', None) + if nc_var: + self.hdr_prpt_typ = nc_var[:] + nc_var = dataset.variables.get('hdr_irpt_typ', None) + if nc_var: + self.hdr_irpt_typ = nc_var[:] + nc_var = dataset.variables.get('hdr_inst_typ', None) + if nc_var: + self.hdr_inst_typ =nc_var[:] + + #Observation data + self.hdr_sid = dataset['hdr_sid'][:] + self.obs_qty = np.array(dataset['obs_qty'][:]) + self.obs_hid = np.array(dataset['obs_hid'][:]) + self.obs_lvl = np.array(dataset['obs_lvl'][:]) + self.obs_hgt = np.array(dataset['obs_hgt'][:]) + self.obs_val = np.array(dataset['obs_val'][:]) + nc_var = dataset.variables.get('obs_vid', None) + if nc_var is None: + self.use_var_id = False + nc_var = dataset.variables.get('obs_gc', None) + else: + self.obs_var_table = nc_tools.get_string_array(dataset, 'obs_var') + if nc_var: + self.obs_vid = np.array(nc_var[:]) + + self.obs_qty_table = nc_tools.get_string_array(dataset, 'obs_qty_table') + + def save_ncfile(self, nc_filename): + met_data = self.get_point_data() + with nc.Dataset(nc_filename, 'w') as nc_dataset: + self.set_nc_data(nc_dataset) + return met_data + + def set_nc_data(self, nc_dataset): + return nc_point_obs.write_nc_data(nc_dataset, self) + + @staticmethod + def write_nc_file(nc_filename, point_obs): + with nc.Dataset(nc_filename, 'w') as nc_dataset: + nc_point_obs.set_nc_data(nc_dataset, point_obs) + + @staticmethod + def write_nc_data(nc_dataset, point_obs): + do_nothing = False + if 0 == point_obs.nhdr: + do_nothing = True + base_met_point_obs.info_msg("the header is empty") + if 0 == point_obs.nobs: + do_nothing = True + base_met_point_obs.info_msg("the observation data is empty") + if do_nothing: + print() + return + + # Set global attributes + nc_dataset.MET_Obs_version = "1.02" ; + nc_dataset.use_var_id = "true" if point_obs.use_var_id else "false" + + # Create dimensions + nc_dataset.createDimension('mxstr', 16) + nc_dataset.createDimension('mxstr2', 40) + nc_dataset.createDimension('mxstr3', 80) + nc_dataset.createDimension('nhdr', point_obs.nhdr) + nc_dataset.createDimension('nobs', 
point_obs.nobs) + #npbhdr = len(point_obs.hdr_prpt_typ) + if 0 < point_obs.npbhdr: + nc_dataset.createDimension('npbhdr', point_obs.npbhdr) + nc_dataset.createDimension('nhdr_typ', point_obs.nhdr_typ) + nc_dataset.createDimension('nhdr_sid', point_obs.nhdr_sid) + nc_dataset.createDimension('nhdr_vld', point_obs.nhdr_vld) + nc_dataset.createDimension('nobs_qty', point_obs.nobs_qty) + nc_dataset.createDimension('obs_var_num', point_obs.nobs_var) + + type_for_string = 'S1' # np.byte + dims_hdr = ('nhdr',) + dims_obs = ('nobs',) + + # Create header and observation variables + var_hdr_typ = nc_dataset.createVariable('hdr_typ', np.int32, dims_hdr, fill_value=-9999) + var_hdr_sid = nc_dataset.createVariable('hdr_sid', np.int32, dims_hdr, fill_value=-9999) + var_hdr_vld = nc_dataset.createVariable('hdr_vld', np.int32, dims_hdr, fill_value=-9999) + var_hdr_lat = nc_dataset.createVariable('hdr_lat', np.float32, dims_hdr, fill_value=-9999.) + var_hdr_lon = nc_dataset.createVariable('hdr_lon', np.float32, dims_hdr, fill_value=-9999.) + var_hdr_elv = nc_dataset.createVariable('hdr_elv', np.float32, dims_hdr, fill_value=-9999.) + + var_obs_qty = nc_dataset.createVariable('obs_qty', np.int32, dims_obs, fill_value=-9999) + var_obs_hid = nc_dataset.createVariable('obs_hid', np.int32, dims_obs, fill_value=-9999) + var_obs_vid = nc_dataset.createVariable('obs_vid', np.int32, dims_obs, fill_value=-9999) + var_obs_lvl = nc_dataset.createVariable('obs_lvl', np.float32, dims_obs, fill_value=-9999.) + var_obs_hgt = nc_dataset.createVariable('obs_hgt', np.float32, dims_obs, fill_value=-9999.) + var_obs_val = nc_dataset.createVariable('obs_val', np.float32, dims_obs, fill_value=-9999.) + + if 0 == point_obs.npbhdr: + var_hdr_prpt_typ = None + var_hdr_irpt_typ = None + var_hdr_inst_typ = None + else: + dims_npbhdr = ('npbhdr',) + var_hdr_prpt_typ = nc_dataset.createVariable('hdr_prpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) + var_hdr_irpt_typ = nc_dataset.createVariable('hdr_irpt_typ', np.int32, dims_npbhdr, fill_value=-9999.) + var_hdr_inst_typ = nc_dataset.createVariable('hdr_inst_typ', np.int32, dims_npbhdr, fill_value=-9999.) 
+ + var_hdr_typ_table = nc_dataset.createVariable('hdr_typ_table', type_for_string, ('nhdr_typ','mxstr2')) + var_hdr_sid_table = nc_dataset.createVariable('hdr_sid_table', type_for_string, ('nhdr_sid','mxstr2')) + var_hdr_vld_table = nc_dataset.createVariable('hdr_vld_table', type_for_string, ('nhdr_vld','mxstr')) + var_obs_qty_table = nc_dataset.createVariable('obs_qty_table', type_for_string, ('nobs_qty','mxstr')) + var_obs_var_table = nc_dataset.createVariable('obs_var', type_for_string, ('obs_var_num','mxstr2')) + var_obs_var_unit = nc_dataset.createVariable('obs_unit', type_for_string, ('obs_var_num','mxstr2')) + var_obs_var_desc = nc_dataset.createVariable('obs_desc', type_for_string, ('obs_var_num','mxstr3')) + + # Set variables + var_hdr_typ[:] = point_obs.hdr_typ[:] + var_hdr_sid[:] = point_obs.hdr_sid[:] + var_hdr_vld[:] = point_obs.hdr_vld[:] + var_hdr_lat[:] = point_obs.hdr_lat[:] + var_hdr_lon[:] = point_obs.hdr_lon[:] + var_hdr_elv[:] = point_obs.hdr_elv[:] + for i in range(0, point_obs.nhdr_typ): + for j in range(0, len(point_obs.hdr_typ_table[i])): + var_hdr_typ_table[i,j] = point_obs.hdr_typ_table[i][j] + for i in range(0, point_obs.nhdr_sid): + for j in range(0, len(point_obs.hdr_sid_table[i])): + var_hdr_sid_table[i,j] = point_obs.hdr_sid_table[i][j] + for i in range(0, point_obs.nhdr_vld): + for j in range(0, len(point_obs.hdr_vld_table[i])): + var_hdr_vld_table[i,j] = point_obs.hdr_vld_table[i][j] + if 0 < point_obs.npbhdr: + var_hdr_prpt_typ[:] = point_obs.hdr_prpt_typ[:] + var_hdr_irpt_typ[:] = point_obs.hdr_irpt_typ[:] + var_hdr_inst_typ[:] = point_obs.hdr_inst_typ[:] + + var_obs_qty[:] = point_obs.obs_qty[:] + var_obs_hid[:] = point_obs.obs_hid[:] + var_obs_vid[:] = point_obs.obs_vid[:] + var_obs_lvl[:] = point_obs.obs_lvl[:] + var_obs_hgt[:] = point_obs.obs_hgt[:] + var_obs_val[:] = point_obs.obs_val[:] + for i in range(0, point_obs.nobs_var): + for j in range(0, len(point_obs.obs_var_table[i])): + var_obs_var_table[i,j] = point_obs.obs_var_table[i][j] + var_obs_var_unit[i] = "" if i >= len(point_obs.obs_var_unit) else point_obs.obs_var_unit[i] + var_obs_var_desc[i] = "" if i >= len(point_obs.obs_var_desc) else point_obs.obs_var_desc[i] + for i in range(0, point_obs.nobs_qty): + for j in range(0, len(point_obs.obs_qty_table[i])): + var_obs_qty_table[i,j] = point_obs.obs_qty_table[i][j] + + # Set variable attributes + var_hdr_typ.long_name = "index of message type" + var_hdr_sid.long_name = "index of station identification" + var_hdr_vld.long_name = "index of valid time" + var_hdr_lat.long_name = "latitude" + var_hdr_lat.units = "degrees_north" + var_hdr_lon.long_name = "longitude" + var_hdr_lon.units = "degrees_east" + var_hdr_elv.long_name = "elevation" + var_hdr_elv.units = "meters above sea level (msl)" + + var_obs_qty.long_name = "index of quality flag" + var_obs_hid.long_name = "index of matching header data" + var_obs_vid.long_name = "index of BUFR variable corresponding to the observation type" + var_obs_lvl.long_name = "pressure level (hPa) or accumulation interval (sec)" + var_obs_hgt.long_name = "height in meters above sea level (msl)" + var_obs_val.long_name = "observation value" + var_hdr_typ_table.long_name = "message type" + var_hdr_sid_table.long_name = "station identification" + var_hdr_vld_table.long_name = "valid time" + var_hdr_vld_table.units = "YYYYMMDD_HHMMSS UTC" + var_obs_qty_table.long_name = "quality flag" + var_obs_var_table.long_name = "variable names" + var_obs_var_unit.long_name = "variable units" + var_obs_var_desc.long_name = 
"variable descriptions" # This is a sample drived class class sample_met_point_obs(met_point_obs): - #@abstractmethod - def read_data(self, arg_map={}): - self.hdr_typ = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) - self.hdr_sid = np.array([ 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3, 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3 ]) - self.hdr_vld = np.array([ 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3, 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3 ]) - self.hdr_lat = np.array([ 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46., 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46. ]) - self.hdr_lon = np.array([ -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92., -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92. ]) - self.hdr_elv = np.array([ 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220. ]) - - self.obs_hid = np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 25 ]) - self.obs_vid = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]) - self.obs_qty = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) - self.obs_lvl = np.array([ 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000. ]) - self.obs_hgt = np.array([ 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2. ]) - self.obs_val = np.array([ 292., 292.5, 293., 293.5, 294., 294.5, 295., 295.5, 296., 292., 293.4, 293., 296., 294., 92., 92.5, 93., 93.5, 94., 94.5, 95., 95.5, 96., 92., 93.4, 93., 96., 94. ]) - - self.hdr_typ_table = [ "ADPSFC" ] - self.hdr_sid_table = [ "001", "002", "003", "004" ] - self.hdr_vld_table = [ - "20120409_115000", "20120409_115500", "20120409_120100", "20120409_120500", "20120409_121000", - "20120409_120000" ] - self.obs_var_table = [ "TMP", "RH" ] - self.obs_qty_table = [ "NA" ] + #@abstractmethod + def read_data(self, arg_map={}): + self.hdr_typ = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) + self.hdr_sid = np.array([ 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3, 0, 0, 0, 0, 0, 1, 2, 3, 3, 1, 2, 2, 3 ]) + self.hdr_vld = np.array([ 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3, 0, 1, 2, 3, 4, 4, 3, 4, 3, 4, 5, 4, 3 ]) + self.hdr_lat = np.array([ 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46., 43., 43., 43., 43., 43., 43., 43., 43., 43., 46., 46., 46., 46. ]) + self.hdr_lon = np.array([ -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92., -89., -89., -89., -89., -89., -89., -89., -89., -89., -92., -92., -92., -92. ]) + self.hdr_elv = np.array([ 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220., 220. 
]) + + self.obs_hid = np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 25 ]) + self.obs_vid = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]) + self.obs_qty = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]) + self.obs_lvl = np.array([ 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000., 1000. ]) + self.obs_hgt = np.array([ 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2. ]) + self.obs_val = np.array([ 292., 292.5, 293., 293.5, 294., 294.5, 295., 295.5, 296., 292., 293.4, 293., 296., 294., 92., 92.5, 93., 93.5, 94., 94.5, 95., 95.5, 96., 92., 93.4, 93., 96., 94. ]) + + self.hdr_typ_table = [ "ADPSFC" ] + self.hdr_sid_table = [ "001", "002", "003", "004" ] + self.hdr_vld_table = [ + "20120409_115000", "20120409_115500", "20120409_120100", "20120409_120500", "20120409_121000", + "20120409_120000" ] + self.obs_var_table = [ "TMP", "RH" ] + self.obs_qty_table = [ "NA" ] def convert_point_data(point_data, check_all_records=False, input_type='csv'): - tmp_point_data = {} - if 'csv' == input_type: - csv_point_data = csv_point_obs(point_data) - csv_point_data.check_csv_point_data(check_all_records) - tmp_point_data = csv_point_data.get_point_data() - else: - base_met_point_obs.error_msg('Not supported input type: {input_type}') - return tmp_point_data + tmp_point_data = {} + if 'csv' == input_type: + csv_point_data = csv_point_obs(point_data) + csv_point_data.check_csv_point_data(check_all_records) + tmp_point_data = csv_point_data.get_point_data() + else: + base_met_point_obs.error_msg('Not supported input type: {input_type}') + return tmp_point_data def main(): - args = {} # or args = [] - point_obs_data = sample_met_point_obs() - point_obs_data.read_data(args) - met_point_data = point_obs_data.get_point_data() + args = {} # or args = [] + point_obs_data = sample_met_point_obs() + point_obs_data.read_data(args) + met_point_data = point_obs_data.get_point_data() - point_obs_data.print_point_data(met_point_data, print_subset=False) + point_obs_data.print_point_data(met_point_data, print_subset=False) def main_nc(argv): - if len(argv) != 1 and argv[1] != ARG_PRINT_DATA: - netcdf_filename = argv[1] - tmp_nc_name = 'tmp_met_point.nc' - point_obs_data = nc_point_obs() - point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename)) - met_point_data = point_obs_data.save_ncfile(tmp_nc_name) - print(f'{base_met_point_obs.get_prompt()} saved met_point_data to {tmp_nc_name}') - met_point_data['met_point_data'] = point_obs_data - - if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]: - met_point_obs.print_point_data(met_point_data) + if len(argv) != 1 and argv[1] != ARG_PRINT_DATA: + netcdf_filename = argv[1] + tmp_nc_name = 'tmp_met_point.nc' + point_obs_data = nc_point_obs() + point_obs_data.read_data(point_obs_data.get_nc_filename(netcdf_filename)) + met_point_data = point_obs_data.save_ncfile(tmp_nc_name) + print(f'{base_met_point_obs.get_prompt()} saved met_point_data to {tmp_nc_name}') + met_point_data['met_point_data'] = point_obs_data + + if DO_PRINT_DATA or ARG_PRINT_DATA == argv[-1]: + met_point_obs.print_point_data(met_point_data) if __name__ == '__main__': - main() - print('Done python scripot') + main() + print('Done python scripot') From 
c893ed0cc8d7a73cc3a588709f6f1f4da1d676e0 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Fri, 14 Apr 2023 16:11:53 -0600
Subject: [PATCH 69/81] #2474 Formatting (indent), Moved convert_point_data
 under met_point_tools

---
 scripts/python/met/point.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/scripts/python/met/point.py b/scripts/python/met/point.py
index 16e0b6c047..10c93f91aa 100644
--- a/scripts/python/met/point.py
+++ b/scripts/python/met/point.py
@@ -441,7 +441,7 @@ def check_csv_record(self, csv_point_data, index):
             error_msgs.append("{i}-th data: obs_value: only NA is accepted as string".format(i=index))
         return error_msgs
 
-     def check_csv_point_data(self, all_records=False):
+    def check_csv_point_data(self, all_records=False):
         if 0 == len(self.point_data):
             self.add_error_msg("No data!")
         elif all_records:
@@ -572,7 +572,7 @@ def convert_point_data(self):
         for key, idx in obs_var_map.items():
             self.obs_var_table[idx] = key
 
-     def get_num_value(self, column_value):
+    def get_num_value(self, column_value):
         num_value = column_value
         if isinstance(column_value, str):
             if self.is_number(column_value):
@@ -583,7 +583,7 @@ def get_num_value(self, column_value):
                 num_value = self.FILL_VALUE
                 if column_value.lower() != 'na' and column_value.lower() != 'n/a':
                     self.log_info(f'{column_value} is not a number, converted to the missing value')
         return num_value
 
-     def is_grib_code(self):
+    def is_grib_code(self):
         grib_code = True
         for _point_data in self.point_data:
             if isinstance(_point_data[6], int):
@@ -593,7 +593,7 @@ def is_grib_code(self):
                break;
         return grib_code
 
-     def is_num_string(self, column_value):
+    def is_num_string(self, column_value):
         is_string = isinstance(column_value, str)
         if is_string:
             is_num = True if self.is_number(column_value) or column_value.lower() == 'na' or column_value.lower() == 'n/a' else False

From 08aeeec0a152c00fbd651759652c98f113568169 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Mon, 17 Apr 2023 09:09:15 -0600
Subject: [PATCH 70/81] #2285 Check and give a warning if met_point_data or
 point_data is missing from the python script.

---
 src/libcode/vx_pointdata_python/python_pointdata.cc | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/src/libcode/vx_pointdata_python/python_pointdata.cc b/src/libcode/vx_pointdata_python/python_pointdata.cc
index 35e20e61e2..4b3834a1d2 100644
--- a/src/libcode/vx_pointdata_python/python_pointdata.cc
+++ b/src/libcode/vx_pointdata_python/python_pointdata.cc
@@ -546,12 +546,19 @@ if ( ! module_obj )  {
 
 bool result = false;
 PyObject *met_point_data = get_python_object(module_obj, python_key_point_data);
-if ( met_point_data ) {
+if ( met_point_data && met_point_data != &_Py_NoneStruct) {
    result = process_point_data(met_point_data, met_pd_out);
 }
 else {
    PyObject *point_data = get_python_object(module_obj, python_key_point_data_list);
-   result = process_point_data_list(point_data, met_pd_out, filters);
+   if ( point_data && point_data != &_Py_NoneStruct)
+      result = process_point_data_list(point_data, met_pd_out, filters);
+   else {
+      mlog << Warning << "\n" << method_name
+           << "neither \"" << python_key_point_data << "\" nor \""
+           << python_key_point_data_list << "\" found in \""
+           << script_name << "\"\n\n";
+   }
 }
 
 return result;

From 465be3e55b754820e13c4168aefe0c1d4fc6de03 Mon Sep 17 00:00:00 2001
From: Daniel Adriaansen
Date: Mon, 17 Apr 2023 09:25:34 -0600
Subject: [PATCH 71/81] A few grammar/typo fixes.
--- docs/Users_Guide/appendixF.rst | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index d9cf6bbd2d..8cdbd4c894 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -7,7 +7,7 @@ Appendix F Python Embedding Introduction ============ -MET includes the ability to embed Python to a limited degree. Users may use their own Python scripts and whatever associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+. +MET includes the ability to embed Python to a limited degree. Users may use their own Python scripts and any associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+. .. _compiling_python_support: @@ -28,7 +28,7 @@ In order to use Python embedding, a local Python installation must be available 6. **Xarray** Python package -Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** opotion to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files with Python embedding is used. +Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process, and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** opotion to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files with Python embedding is used. In addition to using **\-\-enable-python** with **configure** as mentioned above, the following environment variables must also be set prior to executing **configure**: **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**. These may either be set as environment variables or as command line options to **configure**. 
These environment variables are used when building MET to enable the compiler to find the requisite Python executable, header files, and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. Along with the **Python** executable in the users local Python installation, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: @@ -40,7 +40,7 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. -Controlling Which Python MET uses When Running +Controlling Which Python MET Uses When Running ============================================== When MET is compiled with Python embedding support, MET uses the Python executable in that Python installation by default when Python embedding is used. However, for users of highly configurable Python environments, the Python instance set at compilation time may not be sufficient. Users may want to use an alternate Python installation if they need additional packages not available in the Python installation used when compiling MET. In MET versions 9.0+, users have the ability to use a different Python executable when running MET than the version used when compiling MET by setting the environment variable **MET_PYTHON_EXE**. @@ -324,7 +324,7 @@ On the command line for any of the MET tools, specify the path to the input grid Examples of Python Embedding for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -**GridStat with Python embedding for forecast and observations** +**Grid-Stat with Python embedding for forecast and observations** .. code-block:: none :caption: GridStat Command with Dual Python Embedding @@ -450,7 +450,7 @@ Both of the above examples use the **read_ascii_point.py** example script which Examples of Python Embedding for Point Observations ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -**PointStat with Python embedding for forecast and observations** +**Point-Stat with Python embedding for forecast and observations** .. code-block:: none :caption: PointStat Command with Dual Python Embedding @@ -537,14 +537,5 @@ The **read_ascii_mpr.py** sample script can be found in: Examples of Python Embedding for MPR Data ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -TODO: Is there another example that might be useful here? Probably not I suppose. +TODO: Is there another example that might be useful here? Probably not I suppose, since it's a command-line based functionality. -MET Python Module -================= - -TODO: Maybe document some of the base classes and functions here? - -I think the most important is: -met.dataplane.set_dataplane_attrs() - -Maybe add others later on. From d28d5504c31c7d1c0453175e62b486eabc508edd Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Mon, 17 Apr 2023 10:30:32 -0600 Subject: [PATCH 72/81] Adds error messages for Python embedding if MET is not Python enabled. 
--- docs/Users_Guide/appendixF.rst | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 8cdbd4c894..54e1cb8628 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -38,7 +38,20 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above • Again on the command line, run "**python3-config \-\-ldflags**". Set the value of **MET_PYTHON_LD** to the output of that command. -Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. +Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. + +If a user attempts to invoke Python embedding with a version of MET that was not compiled with Python, MET will return an ERROR: + +.. code-block:: none + :caption: MET Errors Without Python Enabled + + ERROR : Met2dDataFileFactory::new_met_2d_data_file() -> Support for Python has not been compiled! + ERROR : To run Python scripts, recompile with the --enable-python option. + + - or - + + ERROR : process_point_obs() -> Support for Python has not been compiled! + ERROR : To run Python scripts, recompile with the --enable-python option. Controlling Which Python MET Uses When Running ============================================== From 8b5989d13a1997d7cd930e6b5d3336ccb4ade587 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Mon, 17 Apr 2023 10:49:33 -0600 Subject: [PATCH 73/81] Removes examples for MPR/StatAnalysis since it's really just command line only and no config file is involved. --- docs/Users_Guide/appendixF.rst | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 54e1cb8628..22d1c3cf62 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -547,8 +547,3 @@ The **read_ascii_mpr.py** sample script can be found in: • `MET GitHub repository `_ in *MET/scripts/python/examples*. -Examples of Python Embedding for MPR Data -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -TODO: Is there another example that might be useful here? Probably not I suppose, since it's a command-line based functionality. - From 716f922f01f4f65657b7fe5ac5af5c33bf8ce9d1 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Mon, 17 Apr 2023 11:17:25 -0600 Subject: [PATCH 74/81] Adds brief blurb about the MET Python package. --- docs/Users_Guide/appendixF.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 22d1c3cf62..b743f3b124 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -547,3 +547,16 @@ The **read_ascii_mpr.py** sample script can be found in: • `MET GitHub repository `_ in *MET/scripts/python/examples*. +MET Python Package +================== + +MET comes with a Python package that provides core functionality for the Python embedding capability. In rare cases, advanced users may find the classes and functions included with this Python package useful. + +To utilize the MET Python package **standalone** when NOT using it with Python embedding, users must add the following to their **PYTHONPATH** environment variable: + +..
code-block:: + :caption: MET Python Module PYTHONPATH + + export PYTHONPATH={MET_INSTALL_DIR}/share/met/python + +where {MET_INSTALL_DIR} is the top level directory where MET is installed, for example **/usr/local/met**. From 5244b66fc571f59c098a4c6457c983fb1f24d301 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Mon, 17 Apr 2023 11:38:33 -0600 Subject: [PATCH 75/81] Corrects path in MPR example and removes MET_BASE. --- docs/Users_Guide/appendixF.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index b743f3b124..d1b45576ff 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -535,7 +535,7 @@ Stat-Analysis can be run using the **-lookin python** command line option: :caption: Stat-Analysis with Python Embedding of MPR Data stat_analysis \ - -lookin python MET_BASE/python/examples/read_ascii_mpr.py point_stat_mpr.txt \ + -lookin python scripts/python/examples/read_ascii_mpr.py point_stat_mpr.txt \ -job aggregate_stat -line_type MPR -out_line_type CNT \ -by FCST_VAR,FCST_LEV From d199e17fdcd95962b73aeb4c8bc9b31fafb03b8b Mon Sep 17 00:00:00 2001 From: Dan Adriaansen Date: Mon, 17 Apr 2023 12:10:34 -0600 Subject: [PATCH 76/81] Update docs/Users_Guide/appendixF.rst Co-authored-by: jprestop --- docs/Users_Guide/appendixF.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index d1b45576ff..4da9644cf7 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -28,7 +28,7 @@ In order to use Python embedding, a local Python installation must be available 6. **Xarray** Python package -Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process, and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** opotion to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files with Python embedding is used. +Users should be aware that in some cases, the C-language Python header files and libraries may be deleted at the end of the Python installation process, and they may need to confirm their availability prior to compiling MET. Once the user has confirmed the above requirements are satisfied, they can compile the MET software for Python embedding by passing the **\-\-enable-python** option to the **configure** script on the command line. This will link the MET C++ code directly to the Python libraries. The **NumPy** and **netCDF4** Python packages are required by the Python scripts included with the MET software that facilitate the passing of data in memory and the reading and writing of temporary files when Python embedding is used. In addition to using **\-\-enable-python** with **configure** as mentioned above, the following environment variables must also be set prior to executing **configure**: **MET_PYTHON_BIN_EXE**, **MET_PYTHON_CC**, and **MET_PYTHON_LD**. These may either be set as environment variables or as command line options to **configure**. 
These environment variables are used when building MET to enable the compiler to find the requisite Python executable, header files, and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. Along with the **Python** executable in the user's local Python installation, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: From fb43a7756173403dbd0b32fdaa1a9383cc4c4f6f Mon Sep 17 00:00:00 2001 From: Dan Adriaansen Date: Mon, 17 Apr 2023 12:10:41 -0600 Subject: [PATCH 77/81] Update docs/Users_Guide/appendixF.rst Co-authored-by: jprestop --- docs/Users_Guide/appendixF.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 4da9644cf7..71f983e953 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -36,7 +36,7 @@ In addition to using **\-\-enable-python** with **configure** as mentioned above • On the command line, run "**python3-config \-\-cflags**". Set the value of **MET_PYTHON_CC** to the output of that command. -• Again on the command line, run "**python3-config \-\-ldflags**". Set the value of **MET_PYTHON_LD** to the output of that command. +• Again on the command line, run "**python3-config \-\-ldflags \-\-embed**". Set the value of **MET_PYTHON_LD** to the output of that command. Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. From 9890fd817b05862ff38affe2e39546335a6ff7bf Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Mon, 17 Apr 2023 12:12:38 -0600 Subject: [PATCH 78/81] Adds missing scripts directory for two commands. --- docs/Users_Guide/appendixF.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index d1b45576ff..828fe5c0cc 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -442,7 +442,7 @@ The Point2Grid, Plot-Point-Obs, Ensemble-Stat, and Point-Stat tools support Pyth :caption: plot_point_obs with Python Embedding plot_point_obs \ - "PYTHON_NUMPY=python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ + "PYTHON_NUMPY=scripts/python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ output_image.ps The ASCII2NC tool also supports Python embedding; however, invoking it varies slightly from other MET tools. For ASCII2NC, Python embedding is used by providing the "-format python" option on the command line. With this option, point observations may be passed as input. An example of this is shown below: .. code-block:: none :caption: ascii2nc with Python Embedding ascii2nc -format python \ - "python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ + "scripts/python/examples/read_ascii_point.py data/sample_obs/ascii/sample_ascii_obs.txt" \ sample_ascii_obs_python.nc Both of the above examples use the **read_ascii_point.py** example script which is included with the MET code. It reads ASCII data in MET's 11-column point observation format and stores it in a Pandas DataFrame to be read by the MET tools using Python embedding for point data.
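For illustration, a minimal sketch of that pattern is shown below. This is not the shipped script; the column names, the whitespace delimiter, and the **point_data** variable name are assumptions based on MET's 11-column point observation format:

.. code-block:: python
   :caption: Sketch of a Pandas-Based Point Observation Reader

   import sys

   import pandas as pd

   # Assumed 11-column MET point observation format: message type,
   # station id, valid time, lat, lon, elevation, variable name,
   # level, height, QC string, and observation value.
   COLUMNS = ['typ', 'sid', 'vld', 'lat', 'lon', 'elv',
              'var', 'lvl', 'hgt', 'qc', 'obs']

   # The ASCII file to read is passed to the script as its first argument.
   df = pd.read_csv(sys.argv[1], header=None, sep=r'\s+',
                    names=COLUMNS, dtype=str)

   # Hand the observations back to MET as a list of 11-element lists,
   # assuming MET picks them up from a variable named point_data.
   point_data = df.values.tolist()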
The **read_ascii_point.py** example script can be found in: From 9364ae1a59449f05a7e783187a997bd1d70dd3ad Mon Sep 17 00:00:00 2001 From: Dan Adriaansen Date: Tue, 18 Apr 2023 16:33:53 -0600 Subject: [PATCH 79/81] Update appendixF.rst Fixes typo in Appendix F. --- docs/Users_Guide/appendixF.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 892ac00a49..d2848c1500 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -524,7 +524,7 @@ If a user does not have an existing MPR line type file created by the MET tools, dtype=str) # Convert to the variable MET expects - met_data = mpr_dataframe.values.tolist() + mpr_data = mpr_dataframe.values.tolist() Running Python Embedding for MPR Data ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From ccd26121fb8ba111436e7ae23d8b3f2beee29ac0 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Wed, 19 Apr 2023 13:57:42 -0600 Subject: [PATCH 80/81] Updates the MET_PYTHON_INPUT_ARG section of the documentation. --- docs/Users_Guide/appendixF.rst | 43 +++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 8 deletions(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 892ac00a49..6538a067bf 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -322,17 +322,44 @@ The first argument for the Plot-Data-Plane tool is the gridded data file to be r Special Case for Ensemble-Stat, Series-Analysis, and MTD ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Since Ensemble-Stat, Series-Analysis, and MTD read multiple input files, a different approach to using Python embedding is required. This approach can be used in any of the MET tools, but it is required when using Python embedding with Ensemble-Stat, Series-Analysis, and MTD. The Ensemble-Stat, Series-Analysis, and MTD tools support the use of file lists on the command line, as do some other MET tools. Typically, the ASCII file list contains a list of files which actually exist on your machine and should be read as input. For Python embedding, these tools loop over the ASCII file list entries, set **MET_PYTHON_INPUT_ARG** to that string, and execute the Python script. This only allows a single command line argument to be passed to the Python script. However multiple arguments may be concatenated together using some delimiter, and the Python script can be defined to parse arguments using that delimiter. When file lists are constructed in this way, the entries will likely not be files which actually exist on your machine. In this case, users should place the constant string "file_list" on the first line of their ASCII file lists. This will ensure that the MET tools will parse the file list properly. +The Ensemble-Stat, Series-Analysis, MTD and Gen-Ens-Prod tools all have the ability to read multiple input files. Because of this feature, a different approach to Python embedding is required. A typical use of these tools is to provide a list of files on the command line. For example: -On the command line for any of the MET tools, specify the path to the input gridded data file(s) as the usage statement for the tool indicates. Do **not** substitute in **PYTHON_NUMPY** or **PYTHON_XARRAY** on the command line for this case. Instead, in the config file dictionary set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate Python embedding in MET. 
Then, in the **name** entry of the config file dictionaries for the forecast or observation data, list the full path to the Python script to be run followed by any command line arguments for that script. However, in the Python command, replace the name of the input gridded data file with the constant string **MET_PYTHON_INPUT_ARG**. When looping over multiple input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the file currently being processed. The example plot_data_plane command listed below yields the same result as the example shown above, but using the approach for this special case: -.. code-block:: none - :caption: plot_data_plane Python Embedding using MET_PYTHON_INPUT_ARG - - plot_data_plane data/python/fcst.txt fcst.ps \ - name="scripts/python/examples/read_ascii_numpy.py MET_PYTHON_INPUT_ARG FCST"; \ - file_type=PYTHON_NUMPY;' \ - -title "Python enabled plot_data_plane" +.. code-block:: + :caption: Gen-Ens-Prod Command Line + + gen_ens_prod ens1.nc ens2.nc ens3.nc ens4.nc -out ens_prod.nc -config GenEnsProd_config + +In this case, a user is passing 4 ensemble members to Gen-Ens-Prod to be evaluated, and each member is in a separate file. If a user wishes to use Python embedding to process the ensemble input files, then the exact same command is used; however, special modifications inside the GenEnsProd_config file are needed. In the config file dictionary, the user must set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate the Python embedding for these tools. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, the user must list the **full path** to the Python script to be run. However, in the Python command, replace the name of the input gridded data file passed to the Python script with the constant string **MET_PYTHON_INPUT_ARG**. When looping over all of the input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the input file currently being processed and, optionally, any command line arguments for the Python script. Here is what this looks like in the GenEnsProd_config file for the above example: + +.. code-block:: + :caption: Gen-Ens-Prod MET_PYTHON_INPUT_ARG Config + + file_type = PYTHON_NUMPY; + field = [ { name = "gen_ens_prod_pyembed.py MET_PYTHON_INPUT_ARG"; } ]; + +In the event the user requires command line arguments to their Python script, they must be included alongside the file names separated by a delimiter. For example, the above Gen-Ens-Prod command with command line arguments for Python would look like: + +.. code-block:: + :caption: Gen-Ens-Prod Command Line with Python Args + + gen_ens_prod ens1.nc,arg1,arg2 ens2.nc,arg1,arg2 ens3.nc,arg1,arg2 ens4.nc,arg1,arg2 -out ens_prod.nc -config GenEnsProd_config + +In this case, the user's Python script will receive "ens1.nc,arg1,arg2" as a single command line argument for each execution of the Python script (i.e. 1 time per file). The user must parse this argument inside their Python script to obtain **arg1** and **arg2** as separate arguments. The list of input files and, optionally, any command line arguments can be written to a single file called **file_list** that is substituted for the file names and command line arguments. For example: +..
code-block:: + :caption: Gen-Ens-Prod File List + + echo "ens1.nc,arg1,arg2 ens2.nc,arg1,arg2 ens3.nc,arg1,arg2 ens4.nc,arg1,arg2" > file_list + gen_ens_prod file_list -out ens_prod.nc -config GenEnsProd_config + +Finally, the above tools do not require data files to be present on a local disk. If the user wishes, their Python script can obtain data from other sources based upon only the command line arguments to their Python script. For example: + +.. code-block:: + :caption: Gen-Ens-Prod Python Args Only + + gen_ens_prod 20230101,0 20230102,0 20230103,0 -out ens_prod.nc -confg GenEnsProd_config + +In the above command, each of the arguments "20230101,0", "20230102,0", and "20230103,0" are provided to the user's Python script in separate calls. Then, inside the Python script these arguments are used to construct a filename or query to a data server or other mechanism to return the desired data and format it the way MET expects inside the Python script, prior to calling Gen-Ens-Prod. Examples of Python Embedding for 2D Gridded Dataplanes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From a849167f052449f4e46dfd41512193aad2190fb6 Mon Sep 17 00:00:00 2001 From: Daniel Adriaansen Date: Wed, 19 Apr 2023 14:13:19 -0600 Subject: [PATCH 81/81] Breaks long command into 2 lines for readability. --- docs/Users_Guide/appendixF.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index 832eda25f0..f028c87326 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -342,7 +342,8 @@ In the event the user requires command line arguments to their Python script, th .. code-block:: :caption: Gen-Ens-Prod Command Line with Python Args - gen_ens_proce ens1.nc,arg1,arg2 ens2.nc,arg1,arg2 ens3.nc,arg1,arg2 ens4.nc,arg1,arg2 -out ens_prod.nc -config GenEnsProd_config + gen_ens_proce ens1.nc,arg1,arg2 ens2.nc,arg1,arg2 ens3.nc,arg1,arg2 ens4.nc,arg1,arg2 \ + -out ens_prod.nc -config GenEnsProd_config In this case, the user's Python script will receive "ens1.nc,arg1,arg2" as a single command line argument for each execution of the Python script (i.e. 1 time per file). The user must parse this argument inside their Python script to obtain **arg1** and **arg2** as separate arguments. The list of input files and optionally, any command line arguments can be written to a single file called **file_list** that is substituted for the file names and command line arguments. For example: