diff --git a/met/docs/Users_Guide/config_options.rst b/met/docs/Users_Guide/config_options.rst index 564e222e95..d8acbab286 100644 --- a/met/docs/Users_Guide/config_options.rst +++ b/met/docs/Users_Guide/config_options.rst @@ -81,7 +81,7 @@ The configuration file language supports the following data types: * The following percentile threshold types are supported: * "SFP" for a percentile of the sample forecast values. - e.g. ">SFP50" means greater than the 50-th forecast percentile. + e.g. ">SFP33.3" means greater than the 33.3-rd forecast percentile. * "SOP" for a percentile of the sample observation values. e.g. ">SOP75" means greater than the 75-th observation percentile. diff --git a/met/docs/conf.py b/met/docs/conf.py index efa6f948c9..13f65b1b9d 100644 --- a/met/docs/conf.py +++ b/met/docs/conf.py @@ -24,7 +24,7 @@ verinfo = version release = f'{version}' release_year = '2021' -release_date = f'{release_year}0302' +release_date = f'{release_year}-03-31' copyright = f'{release_year}, {author}' # -- General configuration --------------------------------------------------- diff --git a/met/docs/index.rst b/met/docs/index.rst index 4e00f3168d..a66a482aa1 100644 --- a/met/docs/index.rst +++ b/met/docs/index.rst @@ -1,16 +1,95 @@ ===================== MET version |version| ===================== -Developed by the `Developmental Testbed Center `_, Boulder, CO +Developed by the `Developmental Testbed Center `_, +Boulder, CO .. image:: _static/METplus_banner_photo_web.png History ------- -The Model Evaluation Tools (MET) were developed by the Developmental Testbed Center (DTC) and released in January 2008. The goal of the tools was to provide the community with a platform independent and extensible framework for reproducible verification. The DTC partners, including NCAR, NOAA, and the USAF, decided to start by replicating the NOAA EMC (see list of acronyms below) Mesoscale Branch verification package, called VSDB. 
In the first release, MET included several pre-processing, statistical, and analysis tools to provided the primary functionality as the EMC VSDB system, and also included a spatial verification package called MODE. +The Model Evaluation Tools (MET) were developed by the Developmental Testbed +Center (DTC) and released in January 2008. The goal of the tools was to +provide the community with a platform-independent and extensible framework +for reproducible verification. +The DTC partners, including NCAR, NOAA, and the USAF, decided to start by +replicating the NOAA EMC (see list of acronyms below) Mesoscale Branch +verification package, called VSDB. +In the first release, MET included several pre-processing, statistical, +and analysis tools to provide the same primary functionality as the EMC VSDB +system, and also included a spatial verification package called MODE. -Over the years, MET and VSDB packages grew in complexity. Verification capability at other NOAA laboratories, such as ESRL, were also under heavy development. An effort to unify verification capability was first started under the HIWPP project and led by NOAA ESRL. In 2015, the NGGPS Program Office started working groups to focus on several aspects of the next gen system, including the Verification and Validation Working Group. This group made the recommendation to use MET as the foundation for a unified verification capability. In 2016, NCAR and GSD leads visited EMC to gather requirements. At that time, the concept of METplus was developed as it extends beyond the original code base. It was originally called METplus but several constraints have driven the transition to the use of METplus. METplus is now the unified verification, validation, and diagnostics capability for NOAA's UFS and a component of NCAR's SIMA modeling frameworks. It being actively developed by NCAR, ESRL, EMC and is open to community contributions. +Over the years, MET and VSDB packages grew in complexity. 
Verification +capability at other NOAA laboratories, such as ESRL, was also under heavy +development. An effort to unify verification capability was first started +under the HIWPP project and led by NOAA ESRL. In 2015, the NGGPS +Program Office started working groups to focus on several aspects of the +next gen system, including the Verification and Validation Working Group. +This group made the recommendation to use MET as the foundation for a +unified verification capability. In 2016, NCAR and GSD leads visited EMC +to gather requirements. At that time, the concept of METplus was developed +as it extends beyond the original code base. It was originally MET+ but +several constraints have driven the transition to the use of METplus. +METplus is now the unified verification, validation, and +diagnostics capability for NOAA's UFS and a component of NCAR's SIMA +modeling frameworks. It is being actively developed by NCAR, ESRL, EMC +and is open to community contributions. +METplus Concept +--------------- +METplus is the overarching, or umbrella, repository and hence framework for +the Unified Forecast System verification capability. It is intended to be +extensible through adding additional capability developed by the community. +The core components of the framework include MET, the associated database and +display systems called METviewer and METexpress, and a suite of Python +wrappers to provide low-level automation and examples, also called use-cases. 
+A description of each tool along with some ancillary repositories are as +follows: + +* **MET** - core statistical tool that matches up grids with either gridded + analyses or point observations and applies configurable methods to compute + statistics and diagnostics +* **METviewer** - core database and display system intended for deep analysis + of MET output +* **METexpress** - core database and display system intended for quick + analysis via pre-defined queries of MET output +* **METplus wrappers** - suite of Python-based wrappers that provide + low-level automation of MET tools and newly developed plotting capability +* **METplus use-cases** - configuration files and sample data to show how to + invoke METplus wrappers to make using MET tools easier and reproducible +* **METcalcpy** - suite of Python-based scripts to be used by other + components of METplus tools for statistical aggregation, event + equalization, and other analysis needs +* **METplotpy** - suite of Python-based scripts to plot MET output, + and in some cases provide additional post-processing of output prior + to plotting +* **METdatadb** - database to store MET output and to be used by both + METviewer and METexpress + +The umbrella repository will be brought together by using a software package +called `manage_externals `_ +developed by the Community Earth System Modeling (CESM) team, hosted at NCAR +and NOAA Earth System's Research Laboratory. The manage_externals package +was developed because CESM is comprised of a number of different components +that are developed and managed independently. Each component also may have +additional "external" dependencies that need to be maintained independently. 
+ +Acronyms +-------- + +* **MET** - Model Evaluation Tools +* **DTC** - Developmental Testbed Center +* **NCAR** - National Center for Atmospheric Research +* **NOAA** - National Oceanic and Atmospheric Administration +* **EMC** - Environmental Modeling Center +* **VSDB** - Verification Statistics Data Base +* **MODE** - Method for Object-Based Diagnostic Evaluation +* **UFS** - Unified Forecast System +* **SIMA** - System for Integrated Modeling of the Atmosphere +* **ESRL** - Earth Systems Research Laboratory +* **HIWPP** - High Impact Weather Prediction Project +* **NGGPS** - Next Generation Global Prediction System +* **GSD** - Global Systems Division .. toctree:: :hidden: diff --git a/met/src/basic/vx_config/my_config_scanner.cc b/met/src/basic/vx_config/my_config_scanner.cc index 57246913cf..1acae0582b 100644 --- a/met/src/basic/vx_config/my_config_scanner.cc +++ b/met/src/basic/vx_config/my_config_scanner.cc @@ -169,6 +169,8 @@ static bool replace_env(ConcatString &); static bool is_fort_thresh_no_spaces(); +static bool is_simple_perc_thresh(); + static int do_simple_perc_thresh(); @@ -370,6 +372,8 @@ if ( is_float_v2() ) { if ( do_float() ) return ( token(FLOAT) ); } if ( is_fort_thresh_no_spaces() ) { return ( do_fort_thresh() ); } +if ( is_simple_perc_thresh() ) { return ( do_simple_perc_thresh() ); } + int t; if ( is_id() ) { t = do_id(); return ( token(t) ); } @@ -533,7 +537,6 @@ if ( is_lhs ) { strncpy(configlval.text, configtext, max_id_length); return ( if ( strcmp(configtext, "print" ) == 0 ) { return ( PRINT ); } - // // boolean? // @@ -554,17 +557,13 @@ for (j=0; jlookup(configtext); if ( e && (e->is_number()) && (! is_lhs) ) { - // cout << "=================== id = \"" << configtext << "\" is_lhs = " << (is_lhs ? "true" : "false") << "\n"; - - // cout << "do_id() -> \n"; - // e->dump(cout); - if ( e->type() == IntegerType ) { set_int(configlval.nval, e->i_value()); @@ -613,28 +607,20 @@ if ( e && (! 
is_lhs) && (e->type() == UserFunctionType) ) { } - /////////////////////////////////////////////////////////////////////// - - - - // // fortran threshold without spaces? (example: "le150") // -if ( (strncmp(configtext, "lt", 2) == 0) && is_number(configtext + 2, max_id_length - 2) ) { return ( do_fort_thresh() ); } - for (j=0; j " @@ -1482,11 +1493,8 @@ if ( index < 0 ) { } - configlval.pc_info.perc_index = index; -// configlval.pc_info.is_simple = true; -configlval.pc_info.value = value; -// configlval.pc_info.value2 = bad_data_double;; +configlval.pc_info.value = value; return ( SIMPLE_PERC_THRESH ); @@ -1495,9 +1503,3 @@ return ( SIMPLE_PERC_THRESH ); //////////////////////////////////////////////////////////////////////// - - - - - - diff --git a/met/src/basic/vx_config/threshold.cc b/met/src/basic/vx_config/threshold.cc index 7879f7090a..b75ec9784a 100644 --- a/met/src/basic/vx_config/threshold.cc +++ b/met/src/basic/vx_config/threshold.cc @@ -166,6 +166,37 @@ return ( n ); //////////////////////////////////////////////////////////////////////// +double Or_Node::climo_prob() const + +{ + +if ( !left_child || !right_child ) { + + mlog << Error << "\nOr_Node::climo_prob() -> " + << "node not populated!\n\n"; + + exit ( 1 ); + +} + +double prob = bad_data_double; +double prob_left = left_child->climo_prob(); +double prob_right = right_child->climo_prob(); + +if ( !is_bad_data(prob_left) && !is_bad_data(prob_right) ) { + + prob = min(prob_left + prob_right, 1.0); + +} + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool Or_Node::need_perc() const { @@ -356,6 +387,55 @@ return ( n ); //////////////////////////////////////////////////////////////////////// +double And_Node::climo_prob() const + +{ + +if ( !left_child || !right_child ) { + + mlog << Error << "\nAnd_Node::climo_prob() -> " + << "node not populated!\n\n"; + + exit ( 1 ); + +} + +double prob = bad_data_double; +double prob_left = 
left_child->climo_prob(); +double prob_right = right_child->climo_prob(); + + // + // For opposing inequalities, compute the difference in percentiles + // + +if ( !is_bad_data(prob_left) && !is_bad_data(prob_right) ) { + + // + // Support complex threshold types >a&&b + // + + if ( ( left_child->type() == thresh_gt || left_child->type() == thresh_ge ) && + ( right_child->type() == thresh_lt || right_child->type() == thresh_le ) ) { + + prob = max( 0.0, prob_right - ( 1.0 - prob_left ) ); + + } + else if ( ( left_child->type() == thresh_lt || left_child->type() == thresh_le ) && + ( right_child->type() == thresh_gt || right_child->type() == thresh_ge ) ) { + + prob = max( 0.0, prob_left - ( 1.0 - prob_right ) ); + + } +} + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool And_Node::need_perc() const { @@ -540,6 +620,23 @@ return ( n ); //////////////////////////////////////////////////////////////////////// +double Not_Node::climo_prob() const + +{ + +double prob = bad_data_double; +double prob_child = child->climo_prob(); + +if ( !is_bad_data(prob_child) ) prob = 1.0 - prob_child; + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool Not_Node::need_perc() const { @@ -1065,6 +1162,59 @@ return; //////////////////////////////////////////////////////////////////////// +double Simple_Node::climo_prob() const + +{ + +double prob = bad_data_double; + +if ( Ptype == perc_thresh_climo_dist ) { + + // Climo probability varies based on the threshold type + switch ( op ) { + + case thresh_lt: + case thresh_le: + + prob = PT/100.0; + break; + + case thresh_eq: + + prob = 0.0; + break; + + case thresh_ne: + + prob = 1.0; + break; + + case thresh_gt: + case thresh_ge: + + prob = 1.0 - PT/100.0; + break; + + default: + + mlog << Error << "\nSimple_Node::climo_prob() -> " + << "cannot convert climatological distribution percentile " + << "threshold to a 
probability!\n\n"; + + exit ( 1 ); + break; + + } // switch +} + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool Simple_Node::need_perc() const { diff --git a/met/src/basic/vx_config/threshold.h b/met/src/basic/vx_config/threshold.h index ebca96a81c..493173e58d 100644 --- a/met/src/basic/vx_config/threshold.h +++ b/met/src/basic/vx_config/threshold.h @@ -157,6 +157,8 @@ class ThreshNode { virtual double pvalue() const = 0; + virtual double climo_prob() const = 0; + virtual bool need_perc() const = 0; virtual void set_perc(const NumArray *, const NumArray *, const NumArray *) = 0; @@ -197,6 +199,8 @@ class Or_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void set_perc(const NumArray *, const NumArray *, const NumArray *); @@ -217,10 +221,10 @@ class Or_Node : public ThreshNode { //////////////////////////////////////////////////////////////////////// -inline ThreshType Or_Node::type() const { return ( thresh_complex ); } -inline double Or_Node::value() const { return ( bad_data_double ); } -inline PercThreshType Or_Node::ptype() const { return ( no_perc_thresh_type ); } -inline double Or_Node::pvalue() const { return ( bad_data_double ); } +inline ThreshType Or_Node::type() const { return ( thresh_complex ); } +inline double Or_Node::value() const { return ( bad_data_double ); } +inline PercThreshType Or_Node::ptype() const { return ( no_perc_thresh_type ); } +inline double Or_Node::pvalue() const { return ( bad_data_double ); } //////////////////////////////////////////////////////////////////////// @@ -244,6 +248,8 @@ class And_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void set_perc(const NumArray *, const NumArray *, const NumArray *); @@ -293,6 +299,8 @@ class Not_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void 
set_perc(const NumArray *, const NumArray *, const NumArray *); @@ -363,6 +371,8 @@ class Simple_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void get_simple_nodes(vector &) const; @@ -435,6 +445,7 @@ class SingleThresh { double get_value() const; PercThreshType get_ptype() const; double get_pvalue() const; + double get_climo_prob() const; void get_simple_nodes(vector &) const; void multiply_by(const double); @@ -451,11 +462,11 @@ class SingleThresh { //////////////////////////////////////////////////////////////////////// -inline ThreshType SingleThresh::get_type() const { return ( node ? node->type() : thresh_na ); } -inline double SingleThresh::get_value() const { return ( node ? node->value() : bad_data_double ); } -inline PercThreshType SingleThresh::get_ptype() const { return ( node ? node->ptype() : no_perc_thresh_type ); } -inline double SingleThresh::get_pvalue() const { return ( node ? node->pvalue() : bad_data_double ); } - +inline ThreshType SingleThresh::get_type() const { return ( node ? node->type() : thresh_na ); } +inline double SingleThresh::get_value() const { return ( node ? node->value() : bad_data_double ); } +inline PercThreshType SingleThresh::get_ptype() const { return ( node ? node->ptype() : no_perc_thresh_type ); } +inline double SingleThresh::get_pvalue() const { return ( node ? node->pvalue() : bad_data_double ); } +inline double SingleThresh::get_climo_prob() const { return ( node ? 
node->climo_prob() : bad_data_double ); } //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_statistics/pair_base.cc b/met/src/libcode/vx_statistics/pair_base.cc index 8066ed262f..0fe6a1b006 100644 --- a/met/src/libcode/vx_statistics/pair_base.cc +++ b/met/src/libcode/vx_statistics/pair_base.cc @@ -1064,46 +1064,21 @@ NumArray derive_climo_prob(const ClimoCDFInfo &cdf_info, const NumArray &mn_na, const NumArray &sd_na, const SingleThresh &othresh) { int i, n_mn, n_sd; - double prob; NumArray climo_prob, climo_vals; + double prob; // Number of valid climo mean and standard deviation n_mn = mn_na.n_valid(); n_sd = sd_na.n_valid(); - // For CDP threshold types, the climo probability is constant - if(othresh.get_ptype() == perc_thresh_climo_dist) { - - // Climo probability varies based on the threshold type - switch(othresh.get_type()) { - - case thresh_lt: - case thresh_le: - prob = othresh.get_pvalue()/100.0; - break; - - case thresh_eq: - prob = 0.0; - break; - - case thresh_ne: - prob = 1.0; - break; + // Check for constant climo probability + if(!is_bad_data(prob = othresh.get_climo_prob())) { - case thresh_gt: - case thresh_ge: - prob = 1.0 - othresh.get_pvalue()/100.0; - break; - - default: - mlog << Error << "\nderive_climo_prob() -> " - << "climatological threshold \"" << othresh.get_str() - << "\" cannot be converted to a probability!\n\n"; - exit(1); - break; - } + mlog << Debug(4) + << "For threshold " << othresh.get_str() + << ", using a constant climatological probability value of " + << prob << ".\n"; - // Add constant climo probability value climo_prob.add_const(prob, n_mn); } // If both mean and standard deviation were provided, use them to diff --git a/met/src/libcode/vx_tc_util/track_info.cc b/met/src/libcode/vx_tc_util/track_info.cc index b7c443c0f3..312d9aa620 100644 --- a/met/src/libcode/vx_tc_util/track_info.cc +++ b/met/src/libcode/vx_tc_util/track_info.cc @@ -801,6 +801,7 @@ bool 
TrackInfoArray::add(const ATCFTrackLine &l, bool check_dup, bool check_anly TrackInfo t; t.add(l, check_dup, check_anly); Track.push_back(t); + status = true; } return(status); diff --git a/met/src/tools/other/pb2nc/pb2nc.cc b/met/src/tools/other/pb2nc/pb2nc.cc index b2b6eba1fe..b36095ccf5 100644 --- a/met/src/tools/other/pb2nc/pb2nc.cc +++ b/met/src/tools/other/pb2nc/pb2nc.cc @@ -2428,7 +2428,7 @@ void write_netcdf_hdr_data() { // Check for no messages retained if(dim_count <= 0) { - mlog << Error << method_name << " -> " + mlog << Error << "\n" << method_name << " -> " << "No PrepBufr messages retained. Nothing to write.\n\n"; // Delete the NetCDF file remove_temp_file(ncfile); @@ -2920,16 +2920,27 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, float *pqtzuv_tq, *pqtzuv_uv; float *pqtzuv_merged = (float *) 0; float *next_pqtzuv, *prev_pqtzuv; + float tq_pres_max, tq_pres_min, uv_pres_max, uv_pres_min; std::map::iterator it, it_tq, it_uv; // Gets pressure levels for TQZ records - for (it=pqtzuv_map_tq.begin(); it!=pqtzuv_map_tq.end(); ++it) { - tq_levels.add(int(it->first)); + it = pqtzuv_map_tq.begin(); + tq_pres_min = tq_pres_max = it->first; + for (; it!=pqtzuv_map_tq.end(); ++it) { + float pres_v = it->first; + if (tq_pres_min > pres_v) tq_pres_min = pres_v; + if (tq_pres_max < pres_v) tq_pres_max = pres_v; + tq_levels.add(nint(pres_v)); } // Gets pressure levels for common records - for (it=pqtzuv_map_uv.begin(); it!=pqtzuv_map_uv.end(); ++it) { - if (tq_levels.has(int(it->first))) { - common_levels.add(int(it->first)); + it = pqtzuv_map_uv.begin(); + uv_pres_min = uv_pres_max = it->first; + for (; it!=pqtzuv_map_uv.end(); ++it) { + float pres_v = it->first; + if (uv_pres_min > pres_v) uv_pres_min = pres_v; + if (uv_pres_max < pres_v) uv_pres_max = pres_v; + if (tq_levels.has(nint(pres_v))) { + common_levels.add(nint(pres_v)); } } @@ -2937,22 +2948,36 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, log_tqz_and_uv(pqtzuv_map_tq, pqtzuv_map_uv, method_name); } + 
bool no_overlap = (tq_pres_max < uv_pres_min) || (tq_pres_min > uv_pres_max); + mlog << Debug(6) << method_name << "TQZ pressures: " << tq_pres_max + << " to " << tq_pres_min << " UV pressures: " << uv_pres_max + << " to " << uv_pres_min << (no_overlap ? " no overlap!" : " overlapping") << "\n"; + if( no_overlap ) { + mlog << Warning << "\n" << method_name + << "Can not combine TQ and UV records because of no overlapping." + << " TQZ count: " << tq_count << ", UV count: " << uv_count + << " common_levels: " << common_levels.n() << "\n\n"; + return pqtzuv_map_merged.size(); + } + // Select first record by 1) merging two records with the same pressure // level or 2) interpolate + int tq_pres, uv_pres; next_pqtzuv = (float *)0; it_tq = pqtzuv_map_tq.begin(); it_uv = pqtzuv_map_uv.begin(); pqtzuv_tq = (float *)it_tq->second; pqtzuv_uv = (float *)it_uv->second;; pqtzuv_merged = new float[mxr8vt]; - if (common_levels.has(int(it_tq->first)) - || common_levels.has(int(it_uv->first))) { + tq_pres = nint(it_tq->first); + uv_pres = nint(it_uv->first); + if (common_levels.has(tq_pres) || common_levels.has(uv_pres)) { // Found the records with the same precsure level - if (it_tq->first != it_uv->first) { - if (common_levels.has(int(it_uv->first))) { + if (tq_pres != uv_pres) { + if (common_levels.has(uv_pres)) { pqtzuv_uv = pqtzuv_map_uv[it_uv->first]; } - else if (common_levels.has(int(it_tq->first))) { + else if (common_levels.has(tq_pres)) { pqtzuv_tq = pqtzuv_map_tq[it_tq->first]; } } @@ -2968,7 +2993,7 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, prev_pqtzuv = (float *)it_uv->second; ++it_uv; } - next_pqtzuv = it_uv->second; + next_pqtzuv = (float *)it_uv->second; } else { //Interpolate TQZ into UV @@ -2978,7 +3003,7 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, prev_pqtzuv = (float *)it_tq->second; ++it_tq; } - next_pqtzuv = it_tq->second; + next_pqtzuv = (float *)it_tq->second; } interpolate_pqtzuv(prev_pqtzuv, pqtzuv_merged, next_pqtzuv); } @@ -2996,6 +3021,7 @@ int 
combine_tqz_and_uv(map pqtzuv_map_tq, if(mlog.verbosity_level() >= PBL_DEBUG_LEVEL) { log_merged_tqz_uv(pqtzuv_map_tq, pqtzuv_map_uv, pqtzuv_map_merged, method_name); } + delete [] pqtzuv_merged; } return pqtzuv_map_merged.size(); @@ -3034,7 +3060,7 @@ float compute_pbl(map pqtzuv_map_tq, hgt_cnt = spfh_cnt = 0; for (it=pqtzuv_map_merged.begin(); it!=pqtzuv_map_merged.end(); ++it) { if (index < 0) { - mlog << Error << method_name << "negative index: " << index << "\n"; + mlog << Error << "\n" << method_name << "negative index: " << index << "\n\n"; break; } @@ -3048,13 +3074,13 @@ float compute_pbl(map pqtzuv_map_tq, pbl_data_vgrd[index] = pqtzuv[5]; if (!is_eq(pbl_data_spfh[index], bad_data_float)) spfh_cnt++; if (!is_eq(pbl_data_hgt[index], bad_data_float)) hgt_cnt++; - selected_levels.add(int(it->first)); + selected_levels.add(nint(it->first)); } index--; } if (index != -1) { - mlog << Error << method_name << "Missing some levels (" << index << ")\n"; + mlog << Error << "\n" << method_name << "Missing some levels (" << index << ")\n"; } if (pbl_level > MAX_PBL_LEVEL) { @@ -3070,7 +3096,7 @@ float compute_pbl(map pqtzuv_map_tq, if (!is_eq(highest_pressure, bad_data_float)) { index = MAX_PBL_LEVEL - 1; for (; it!=pqtzuv_map_tq.end(); ++it) { - int pres_level = int(it->first); + int pres_level = nint(it->first); if (selected_levels.has(pres_level)) break; float *pqtzuv = pqtzuv_map_merged[it->first]; @@ -3139,10 +3165,10 @@ void insert_pbl(float *obs_arr, const float pbl_value, const int pbl_code, hdr_info << unix_to_yyyymmdd_hhmmss(hdr_vld_ut) << " " << hdr_typ << " " << hdr_sid; if (is_eq(pbl_value, bad_data_float)) { - mlog << Warning << "Failed to compute PBL " << hdr_info << "\n\n"; + mlog << Warning << "\nFailed to compute PBL " << hdr_info << "\n\n"; } else if (pbl_value < hdr_elv) { - mlog << Warning << "Not saved because the computed PBL (" << pbl_value + mlog << Warning << "\nNot saved because the computed PBL (" << pbl_value << ") is less than the 
station elevation (" << hdr_elv << "). " << hdr_info << "\n\n"; obs_arr[4] = 0; @@ -3156,7 +3182,7 @@ void insert_pbl(float *obs_arr, const float pbl_value, const int pbl_code, << " lat: " << hdr_lat << ", lon: " << hdr_lon << ", elv: " << hdr_elv << " " << hdr_info << "\n\n"; if (obs_arr[4] > MAX_PBL) { - mlog << Warning << " Computed PBL (" << obs_arr[4] << " from " + mlog << Warning << "\nComputed PBL (" << obs_arr[4] << " from " << pbl_value << ") is too high, Reset to " << MAX_PBL << " " << hdr_info<< "\n\n"; obs_arr[4] = MAX_PBL; @@ -3192,9 +3218,14 @@ int interpolate_by_pressure(int length, float *pres_data, float *var_data) { << var_data[idx_start] << " and " << var_data[idx_end] << "\n"; float data_diff = var_data[idx_end] - var_data[idx_start]; for (idx2 = idx_start+1; idx2 pqtzuv_map_pivot, if (first_pres < it_pivot->first) break; } mlog << Debug(8) << method_name << "pivot->first: " << it_pivot->first - << " aux->first " << it_aux->first << " first_pres: " << first_pres - << " prev_pqtzuv[0]" << prev_pqtzuv[0] << "\n"; + << " aux->first: " << it_aux->first << " first_pres: " << first_pres + << " prev_pqtzuv[0]: " << prev_pqtzuv[0] << "\n"; // Find next UV level for (; it_aux!=pqtzuv_map_aux.end(); ++it_aux) { // Skip the records below the first mathcing/interpolated level diff --git a/test/config/PB2NCConfig_pbl b/test/config/PB2NCConfig_pbl new file mode 100644 index 0000000000..eeedd7a3e4 --- /dev/null +++ b/test/config/PB2NCConfig_pbl @@ -0,0 +1,162 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// PB2NC configuration file. +// +// For additional information, see the MET_BASE/config/README file. 
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// PrepBufr message type +// +message_type = ["ONLYSF", "ADPUPA"]; + +// +// Mapping of message type group name to comma-separated list of values +// Derive PRMSL only for SURFACE message types +// +message_type_group_map = [ + { key = "SURFACE"; val = "ADPSFC,SFCSHP,MSONET"; }, + { key = "ANYAIR"; val = "AIRCAR,AIRCFT"; }, + { key = "ANYSFC"; val = "ADPSFC,SFCSHP,ADPUPA,PROFLR,MSONET"; }, + { key = "ONLYSF"; val = "ADPSFC,SFCSHP"; } +]; + +// +// Mapping of input PrepBufr message types to output message types +// +message_type_map = []; + +// +// PrepBufr station ID +// +station_id = []; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observation time window +// +obs_window = { + beg = -2700; + end = 2700; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observation retention regions +// +mask = { + grid = ""; + poly = ""; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observing location elevation +// +elevation_range = { + beg = -1000; + end = 100000; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observation types +// +pb_report_type = []; + +in_report_type = []; + +instrument_type = []; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Vertical levels to retain +// +level_range = { + beg = 1; + end = 511; +} + +level_category = [0, 1, 4, 5, 6]; + +/////////////////////////////////////////////////////////////////////////////// + +// +// BUFR variable names to retain or derive. +// Use obs_bufr_map to rename variables in the output. +// If empty, process all available variables. 
+// +obs_bufr_var = ["D_CAPE", "D_PBL"]; +//////////////////////////////////////////////////////////////////////////////// + +// +// Mapping of input BUFR variable names to output variables names. +// The default PREPBUFR map, obs_prepbufr_map, is appended to this map. +// +obs_bufr_map = []; + +// +// Default mapping for PREPBUFR. Replace input BUFR variable names with GRIB +// abbreviations in the output. This default map is appended to obs_bufr_map. +// This should not typically be overridden. +// +obs_prefbufr_map = [ + { key = "POB"; val = "PRES"; }, + { key = "QOB"; val = "SPFH"; }, + { key = "TOB"; val = "TMP"; }, + { key = "UOB"; val = "UGRD"; }, + { key = "VOB"; val = "VGRD"; }, + { key = "D_DPT"; val = "DPT"; }, + { key = "D_WDIR"; val = "WDIR"; }, + { key = "D_WIND"; val = "WIND"; }, + { key = "D_RH"; val = "RH"; }, + { key = "D_MIXR"; val = "MIXR"; }, + { key = "D_PBL"; val = "HPBL"; }, + { key = "D_PRMSL"; val = "PRMSL"; }, + { key = "D_CAPE"; val = "CAPE"; }, + { key = "TDO"; val = "DPT"; }, + { key = "PMO"; val = "PRMSL"; }, + { key = "TOCC"; val = "TCDC"; }, + { key = "HOVI"; val = "VIS"; }, + { key = "CEILING"; val = "HGT"; }, + { key = "MXGS"; val = "GUST"; } +]; + +//////////////////////////////////////////////////////////////////////////////// + +quality_mark_thresh = 9; +event_stack_flag = TOP; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Time periods for the summarization +// obs_var (string array) is added and works like grib_code (int array) +// when use_var_id is enabled and variable names are saved. 
+// +time_summary = { + flag = FALSE; + raw_data = FALSE; + beg = "000000"; + end = "235959"; + step = 300; + width = 600; + grib_code = []; + obs_var = [ "TMP", "WDIR", "RH" ]; + type = [ "min", "max", "range", "mean", "stdev", "median", "p80" ]; + vld_freq = 0; + vld_thresh = 0.0; +} + +//////////////////////////////////////////////////////////////////////////////// + +tmp_dir = "/tmp"; +version = "V10.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/test/xml/unit_pb2nc.xml b/test/xml/unit_pb2nc.xml index a65f52110d..0366d902f9 100644 --- a/test/xml/unit_pb2nc.xml +++ b/test/xml/unit_pb2nc.xml @@ -131,6 +131,25 @@ + + &MET_BIN;/pb2nc + + STATION_ID + MASK_GRID + MASK_POLY + QUALITY_MARK_THRESH 2 + + \ + &DATA_DIR_OBS;/prepbufr/nam.20210311.t00z.prepbufr.tm00 \ + &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.pbl.nc \ + &CONFIG_DIR;/PB2NCConfig_pbl \ + -v 1 + + + &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.pbl.nc + + + &MET_BIN;/pb2nc