diff --git a/parm/atm/jcb-prototype_3dvar.yaml.j2 b/parm/atm/jcb-prototype_3dvar.yaml.j2
index 4330a87bd..ca01ca799 100644
--- a/parm/atm/jcb-prototype_3dvar.yaml.j2
+++ b/parm/atm/jcb-prototype_3dvar.yaml.j2
@@ -14,7 +14,17 @@ observations:
- atms_n20
# - atms_npp
- conventional_ps
- - gnssro
+ - gnssro_cosmic2
+ - gnssro_spire
+ - gnssro_s6
+ - gnssro_geoopt
+ - gnssro_grace
+ - gnssro_k5
+ - gnssro_metop
+ - gnssro_paz
+ - gnssro_piq
+ - gnssro_tsx
+ - gnssro_tdm
# - gpsro
# - iasi_metop-a
# - iasi_metop-b
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_cosmic2.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_cosmic2.json
new file mode 100755
index 000000000..26934388a
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_cosmic2.json
@@ -0,0 +1,20 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "UCAR",
+ "mission" : "cosmic2",
+ "satellite_info" : [
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "COSMIC-2 E1", "satellite_full_name" : "Constellation Observing System for Meteorology, Ionosphere, and Climate-2 E1", "satellite_id" : 750 },
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "COSMIC-2 E2", "satellite_full_name" : "Constellation Observing System for Meteorology, Ionosphere, and Climate-2 E2", "satellite_id" : 751 },
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "COSMIC-2 E3", "satellite_full_name" : "Constellation Observing System for Meteorology, Ionosphere, and Climate-2 E3", "satellite_id" : 752 },
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "COSMIC-2 E4", "satellite_full_name" : "Constellation Observing System for Meteorology, Ionosphere, and Climate-2 E4", "satellite_id" : 753 },
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "COSMIC-2 E5", "satellite_full_name" : "Constellation Observing System for Meteorology, Ionosphere, and Climate-2 E5", "satellite_id" : 754 },
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "COSMIC-2 E6", "satellite_full_name" : "Constellation Observing System for Meteorology, Ionosphere, and Climate-2 E6", "satellite_id" : 755 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_geoopt.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_geoopt.json
new file mode 100755
index 000000000..2461fceee
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_geoopt.json
@@ -0,0 +1,16 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "UCAR",
+ "mission" : "geoopt",
+ "satellite_info" : [
+ { "sensor_name" : "CION", "sensor_full_name" : "CICERO Instrument for GNSS-RO ", "sensor_id" : 526, "satellite_name" : "CICERO-1 OP1", "satellite_full_name" : "Community Initiative for Cellular Earth Remote Observation OP1", "satellite_id" : 265 },
+ { "sensor_name" : "CION", "sensor_full_name" : "CICERO Instrument for GNSS-RO ", "sensor_id" : 526, "satellite_name" : "CICERO-1 OP1", "satellite_full_name" : "Community Initiative for Cellular Earth Remote Observation OP2", "satellite_id" : 266 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_grace.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_grace.json
new file mode 100755
index 000000000..398b3559c
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_grace.json
@@ -0,0 +1,16 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "GFZ",
+ "mission" : "grace",
+ "satellite_info" : [
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "GRACE C", "satellite_full_name" : "Gravity Recovery and Climate Experiment Follow-On C", "satellite_id" : 803 },
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "GRACE D", "satellite_full_name" : "Gravity Recovery and Climate Experiment Follow-On D", "satellite_id" : 804 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_k5.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_k5.json
new file mode 100755
index 000000000..d234f6e2b
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_k5.json
@@ -0,0 +1,15 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "UCAR",
+ "mission" : "k5",
+ "satellite_info" : [
+ { "sensor_name" : "IGOR", "sensor_full_name" : "Integrated GPS and Occultation Receiver", "sensor_id" : 103, "satellite_name" : "KOMPSAT-9", "satellite_full_name" : "Korean Multi-Purpose Satellite", "satellite_id" : 825}
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_metop.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_metop.json
new file mode 100755
index 000000000..167e7cbb6
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_metop.json
@@ -0,0 +1,17 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "DMI",
+ "mission" : "metop",
+ "satellite_info" : [
+ { "sensor_name" : "GRAS", "sensor_full_name" : "GNSS Receiver for Atmospheric Sounding", "sensor_id" : 202, "satellite_name" : "MetOp-B", "satellite_full_name" : "Meteorological Operational satellite B", "satellite_id" : 3 },
+ { "sensor_name" : "GRAS", "sensor_full_name" : "GNSS Receiver for Atmospheric Sounding", "sensor_id" : 202, "satellite_name" : "MetOp-A", "satellite_full_name" : "Meteorological Operational satellite A", "satellite_id" : 4 },
+ { "sensor_name" : "GRAS", "sensor_full_name" : "GNSS Receiver for Atmospheric Sounding", "sensor_id" : 202, "satellite_name" : "MetOp-C", "satellite_full_name" : "Meteorological Operational satellite C", "satellite_id" : 5 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_paz.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_paz.json
new file mode 100755
index 000000000..a34f60282
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_paz.json
@@ -0,0 +1,15 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "UCAR",
+ "mission" : "paz",
+ "satellite_info" : [
+ { "sensor_name" : "IGOR", "sensor_full_name" : "Integrated GPS and Occultation Receiver", "sensor_id" : 103, "satellite_name" : "PAZ", "satellite_full_name" : "Paz satellite", "satellite_id" : 44}
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_piq.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_piq.json
new file mode 100755
index 000000000..680c4a8d4
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_piq.json
@@ -0,0 +1,16 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "UCAR",
+ "mission" : "piq",
+ "satellite_info" : [
+ { "sensor_name" : "Pyxis-RO", "sensor_full_name" : "Pyxis-RO", "sensor_id" : 534, "satellite_name" : "PlanetIQ", "satellite_full_name" : "PLANETIQ GNOMES-A", "satellite_id" : 267 },
+ { "sensor_name" : "Pyxis-RO", "sensor_full_name" : "Pyxis-RO", "sensor_id" : 534, "satellite_name" : "PlanetIQ", "satellite_full_name" : "PLANETIQ GNOMES-B", "satellite_id" : 268 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_s6.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_s6.json
new file mode 100755
index 000000000..cb5fcc3c1
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_s6.json
@@ -0,0 +1,15 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "JPL",
+ "mission" : "s6",
+ "satellite_info" : [
+ { "sensor_name" : "Tri-G", "sensor_full_name" : "Triple-G", "sensor_id" : 104, "satellite_name" : "Sentinel-6A", "satellite_full_name" : "Sentinel-6 Michael Freilich", "satellite_id" : 66 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_spire.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_spire.json
new file mode 100755
index 000000000..8e3acd469
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_spire.json
@@ -0,0 +1,15 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "UCAR",
+ "mission" : "spire",
+ "satellite_info" : [
+ { "sensor_name" : "STRATOS", "sensor_full_name" : "STRATOS", "sensor_id" : 530, "satellite_name" : "Spire", "satellite_full_name" : "SPIRE LEMUR 3U CUBESAT", "satellite_id" : 269 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_tdm.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_tdm.json
new file mode 100755
index 000000000..3d62143f8
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_tdm.json
@@ -0,0 +1,15 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "GFZ",
+ "mission" : "tdm",
+ "satellite_info" : [
+ { "sensor_name" : "IGOR", "sensor_full_name" : "Integrated GPS and Occultation Receiver", "sensor_id" : 103, "satellite_name" : "TanDEM-X", "satellite_full_name" : "TerraSAR-X add-on for Digital Elevation Measurement", "satellite_id" : 43 }
+ ]
+}
diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gnssro_tsx.json b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_tsx.json
new file mode 100755
index 000000000..82f35a4e4
--- /dev/null
+++ b/parm/ioda/bufr2ioda/bufr2ioda_gnssro_tsx.json
@@ -0,0 +1,15 @@
+{
+ "data_format" : "bufr_d",
+ "data_type" : "gpsro",
+ "cycle_type" : "{{ RUN }}",
+ "cycle_datetime" : "{{ current_cycle | to_YMDH }}",
+ "dump_directory" : "{{ DMPDIR }}",
+ "ioda_directory" : "{{ COM_OBS }}",
+ "subsets" : [ "NC003010" ],
+ "data_description" : "Satellite radio occultation data",
+ "data_provider" : "GFZ",
+ "mission" : "tsx",
+ "satellite_info" : [
+ { "sensor_name" : "IGOR", "sensor_full_name" : "Integrated GPS and Occultation Receiver", "sensor_id" : 103, "satellite_name" : "TerraSAR-X", "satellite_full_name" : "X-band TerraSAR satellite", "satellite_id" : 42 }
+ ]
+}
diff --git a/ush/eva/marine_eva_post.py b/ush/eva/marine_eva_post.py
deleted file mode 100755
index b537ddb3a..000000000
--- a/ush/eva/marine_eva_post.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import datetime
-import logging
-import os
-import socket
-import yaml
-import glob
-
-# sets the cmap vmin/vmax for each variable
-# TODO: this should probably be in a yaml or something
-vminmax = {'seaSurfaceTemperature': {'vmin': -2.0, 'vmax': 2.0},
- 'seaIceFraction': {'vmin': -0.2, 'vmax': 0.2},
- 'seaSurfaceSalinity': {'vmin': -0.2, 'vmax': 0.2}, # TODO: this should be changed
- 'absoluteDynamicTopography': {'vmin': -0.2, 'vmax': 0.2},
- 'waterTemperature': {'vmin': -2.0, 'vmax': 2.0},
- 'salinity': {'vmin': -0.2, 'vmax': 0.2}}
-
-
-def marine_eva_post(inputyaml, outputdir, diagdir):
- logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S')
- try:
- with open(inputyaml, 'r') as inputyaml_opened:
- input_yaml_dict = yaml.safe_load(inputyaml_opened)
- logging.info(f'Loading input YAML from {inputyaml}')
- except Exception as e:
- logging.error(f'Error occurred when attempting to load: {inputyaml}, error: {e}')
- for dataset in input_yaml_dict['datasets']:
- newfilenames = []
- for filename in dataset['filenames']:
- newfilename = os.path.join(diagdir, os.path.basename(filename))
- newfilenames.append(newfilename)
- dataset['filenames'] = newfilenames
- for graphic in input_yaml_dict['graphics']['figure_list']:
- # this assumes that there is only one variable, or that the
- # variables are all the same
- variable = graphic['batch figure']['variables'][0]
- for plot in graphic['plots']:
- for layer in plot['layers']:
- if layer['type'] == 'MapScatter':
- layer['vmin'] = vminmax[variable]['vmin']
- layer['vmax'] = vminmax[variable]['vmax']
-
- # first, let us prepend some comments that tell someone this output YAML was generated
- now = datetime.datetime.now()
- prepend_str = ''.join([
- f'# This YAML file automatically generated by marine_eva_post.py\n',
- f'# on {socket.gethostname()} at {now.strftime("%Y-%m-%dT%H:%M:%SZ")}\n',
- ])
-
- outputyaml = os.path.join(outputdir, os.path.basename(inputyaml))
- # open output file for writing and start the find/replace process
- try:
- logging.info(f'Writing modified YAML to {outputyaml}')
- with open(outputyaml, 'w') as yaml_out:
- yaml_out.write(prepend_str)
- yaml.dump(input_yaml_dict, yaml_out)
- except Exception as e:
- logging.error(f'Error occurred when attempting to write: {outputyaml}, error: {e}')
-
-
-if __name__ == "__main__":
-
- parser = argparse.ArgumentParser()
- parser.add_argument('-i', '--inputyaml', type=str, help='Input YAML to modify', required=True)
- parser.add_argument('-o', '--outputdir', type=str, help='Directory to send output YAML', required=True)
- parser.add_argument('-d', '--diagdir', type=str, help='Location of diag files', required=True)
- args = parser.parse_args()
- marine_eva_post(args.inputyaml, args.outputdir, args.diagdir)
diff --git a/ush/eva/marine_gdas_plots.yaml b/ush/eva/marine_gdas_plots.yaml
deleted file mode 100644
index 0a903d0c4..000000000
--- a/ush/eva/marine_gdas_plots.yaml
+++ /dev/null
@@ -1,222 +0,0 @@
-# template YAML file to create EVA YAML files
-# based on obs spaces listed in JEDI YAML files
-datasets:
- - name: experiment
- type: IodaObsSpace
- filenames:
- - @FILENAME@
- @CHANNELSKEY@
- groups:
- - name: ObsValue
- variables: &variables @VARIABLES@
- - name: ObsError
- - name: ombg
- - name: oman
- - name: hofx0
- - name: EffectiveQC0
- - name: MetaData
- - name: PreQC
-transforms:
-
- # bkg
- - transform: arithmetic
- new name: experiment::bkg::${variable}
- equals: experiment::ObsValue::${variable}-experiment::ombg::${variable}
- for:
- variable: *variables
-
- # Generate omb that passed QC for JEDI
- - transform: accept where
- new name: experiment::OmBQC::${variable}
- starting field: experiment::ombg::${variable}
- where:
- - experiment::EffectiveQC0::${variable} == 0
- for:
- variable: *variables
-
- # Generate oma that passed QC for JEDI
- - transform: accept where
- new name: experiment::OmAQC::${variable}
- starting field: experiment::oman::${variable}
- where:
- - experiment::EffectiveQC0::${variable} == 0
- for:
- variable: *variables
-
- # Generate oma that passed QC for JEDI
- - transform: accept where
- new name: experiment::hofxQC::${variable}
- starting field: experiment::hofx0::${variable}
- where:
- - experiment::EffectiveQC0::${variable} == 0
- for:
- variable: *variables
-
-
-
-
-
-graphics:
-
- plotting_backend: Emcpy
- figure_list:
-
- # ---------- Map Plots ----------
- # Map plot of OmBQC
- # --------
-
- - batch figure:
- variables: *variables
- @CHANNELSKEY@
- dynamic options:
- - type: vminvmaxcmap
- data variable: experiment::OmBQC::${variable}
- figure:
- layout: [1,1]
- figure size: [20,10]
- title: 'OmB post QC | @NAME@ @CYCLE@ | ${variable_title}'
- output name: map_plots/@NAME@/${variable}/@CHANNELVAR@/@NAME@_${variable}@CHANNELVAR@OmBQC.png
- tight_layout: true
- plots:
- - mapping:
- projection: plcarr
- domain: global
- add_map_features: ['coastline']
- add_grid:
- add_colorbar:
- label: '${variable}'
- layers:
- - type: MapScatter
- longitude:
- variable: experiment::MetaData::longitude
- latitude:
- variable: experiment::MetaData::latitude
- data:
- variable: experiment::OmBQC::${variable}
- @CHANNELKEY@
- markersize: 0.01
- label: '$(variable)'
- colorbar: true
- # below may need to be edited/removed
- cmap: 'seismic'
- vmin: ${dynamic_vmin}
- vmax: ${dynamic_vmax}
-
-
- # Histogram plots
- # ---------------
-
- # OmA pre and post QC
- - batch figure:
- variables: *variables
- figure:
- layout: [1,1]
- title: 'OmA pre- and post QC | @NAME@ | ${variable_title}'
- output name: histograms/@NAME@/${variable}/oma_pre_post_qc_${variable}.png
- plots:
- - add_xlabel: 'Observation minus anl pre- and post-QC'
- add_ylabel: 'Count'
- add_legend:
- loc: 'upper left'
- statistics:
- fields:
- - field_name: experiment::OmAQC::${variable}
- xloc: 0.5
- yloc: -0.10
- kwargs:
- fontsize: 6
- statistics_variables:
- - n
- - min
- - mean
- - max
- - std
- layers:
- - type: Histogram
- data:
- variable: experiment::OmAQC::${variable}
- color: 'blue'
- label: 'OmA (post QC)'
- bins: 100
- alpha: 0.5
- - type: Histogram
- data:
- variable: experiment::oman::${variable}
- color: 'red'
- label: 'OmA (pre QC)'
- bins: 100
- alpha: 0.5
-
- # diff between OmA and OmB
- - batch figure:
- variables: *variables
- figure:
- layout: [1,1]
- title: 'OmA and OmB, post-QC | @NAME@ | ${variable_title}'
- output name: histograms/@NAME@/${variable}/oma_omb_histogram_${variable}.png
- plots:
- - add_xlabel: 'OmA and OmB post-QC'
- add_ylabel: 'Count'
- add_legend:
- loc: 'upper left'
- statistics:
- fields:
- - field_name: experiment::OmAQC::${variable}
- xloc: 0.5
- yloc: -0.10
- kwargs:
- fontsize: 6
- statistics_variables:
- - n
- - min
- - mean
- - max
- - std
- layers:
- - type: Histogram
- data:
- variable: experiment::OmAQC::${variable}
- color: 'blue'
- label: 'OmA (post QC)'
- bins: 100
- alpha: 0.5
- - type: Histogram
- data:
- variable: experiment::OmBQC::${variable}
- color: 'red'
- label: 'OmB (post QC)'
- bins: 100
- alpha: 0.5
-
- - batch figure:
- variables: *variables
- @CHANNELSKEY@
- figure:
- layout: [1,1]
- title: 'Observations vs. JEDI h(x) | @NAME@ @CYCLE@ | ${variable_title}'
- output name: observation_scatter_plots/jedi_hofx_vs_obs_@CYCLE@_@NAME@_${variable}@CHANNELVAR@.png
- plots:
- - add_xlabel: 'Observation Value'
- add_ylabel: 'JEDI h(x)'
- add_grid:
- add_legend:
- loc: 'upper left'
- layers:
- - type: Scatter
- x:
- variable: experiment::ObsValue::${variable}
- y:
- variable: experiment::hofx0::${variable}
- @CHANNELKEY@
- markersize: 1
- color: 'black'
- label: 'JEDI h(x) versus obs (all obs)'
- - type: Scatter
- x:
- variable: experiment::ObsValue::${variable}
- y:
- variable: experiment::hofxQC::${variable}
- @CHANNELKEY@
- markersize: 1
- color: 'red'
- label: 'JEDI h(x) versus obs (passed QC in JEDI)'
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_cosmic2.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_cosmic2.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_cosmic2.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+ arfrot = r.get('obsTypeBendingAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+ logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+ .write_attr('long_name', 'Percent Confidence') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
if __name__ == '__main__':

    start_time = time.time()

    # Command-line interface: a required JSON config plus an optional
    # verbosity switch that raises the log level to DEBUG.
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', type=str,
                        help='Input JSON configuration', required=True)
    parser.add_argument('-v', '--verbose',
                        help='print debug logging information',
                        action='store_true')
    args = parser.parse_args()

    logger = Logger('bufr2ioda_gnssro.py',
                    level=('DEBUG' if args.verbose else 'INFO'),
                    colored_log=True)

    # Load the converter configuration (paths, cycle info, satellite table).
    with open(args.config, "r") as json_file:
        config = json.load(json_file)

    bufr_to_ioda(config, logger)

    running_time = time.time() - start_time
    logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_geoopt.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_geoopt.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_geoopt.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
def Derive_stationIdentification(said, ptid):
    """Derive station identifiers from satellite and transmitter ids.

    Each identifier is the 4-digit zero-padded satellite id (SAID)
    concatenated with the 4-digit zero-padded transmitter id (PTID),
    e.g. said=750, ptid=23 -> "07500023".

    Returns a masked string array whose fill value is the empty string.
    """
    idents = [str(s).zfill(4) + str(p).zfill(4) for s, p in zip(said, ptid)]
    stid = ma.array(np.array(idents).astype(dtype='str'))
    ma.set_fill_value(stid, "")

    return stid
+
+
def Compute_Grid_Location(degrees):
    """Convert angles from degrees to radians, in place.

    Entries outside [-180, 360] are left untouched — presumably these
    are missing-value fills that must not be scaled (TODO confirm with
    the BUFR fill-value convention used upstream).

    Returns the same (mutated) array that was passed in.
    """
    for idx, value in enumerate(degrees):
        if -180 <= value <= 360:
            degrees[idx] = np.deg2rad(value)

    return degrees
+
+
def Compute_imph(impp, elrc, geodu):
    """Compute impact height as float32.

    Impact height = impact parameter (impp) minus the earth radius of
    curvature (elrc) minus the geoid undulation (geodu), all in metres.
    The two subtractions are kept in this order so the floating-point
    result matches the original formulation exactly.
    """
    height = impp - elrc
    height = height - geodu

    return height.astype(np.float32)
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+ arfrot = r.get('obsTypeBendingAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+ logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+ .write_attr('long_name', 'Percent Confidence') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
if __name__ == '__main__':

    start_time = time.time()

    # Command-line interface: a JSON config is mandatory, verbosity optional.
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', type=str,
                        help='Input JSON configuration', required=True)
    parser.add_argument('-v', '--verbose',
                        help='print debug logging information',
                        action='store_true')
    cli_args = parser.parse_args()

    # -v switches the logger from INFO to DEBUG.
    if cli_args.verbose:
        log_level = 'DEBUG'
    else:
        log_level = 'INFO'
    logger = Logger('bufr2ioda_gnssro.py', level=log_level,
                    colored_log=True)

    # Load the converter configuration and run the BUFR -> IODA conversion.
    with open(cli_args.config, "r") as json_file:
        config = json.load(json_file)

    bufr_to_ioda(config, logger)

    running_time = time.time() - start_time
    logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_grace.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_grace.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_grace.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
def Derive_stationIdentification(said, ptid):
    """Derive station identification strings from satellite and transmitter ids.

    Each element is the 4-digit zero-padded satellite identifier (SAID)
    concatenated with the 4-digit zero-padded transmitter identifier (PTID).
    Returns a masked string array whose fill value is the empty string.
    """
    combined = [str(s).zfill(4) + str(p).zfill(4) for s, p in zip(said, ptid)]
    stid = ma.array(np.array(combined).astype(dtype='str'))
    ma.set_fill_value(stid, "")

    return stid
+
+
def Compute_Grid_Location(degrees):
    """Convert grid angles from degrees to radians, in place.

    Only values in the plausible angular range [-180, 360] are converted;
    anything outside that range (e.g. BUFR missing/fill values) is left
    untouched.  The same (mutated) array is returned for convenience.
    """
    for idx, value in enumerate(degrees):
        if -180 <= value <= 360:
            degrees[idx] = np.deg2rad(value)

    return degrees
+
+
def Compute_imph(impp, elrc, geodu):
    """Compute impact height from impact parameter.

    Impact height = impact parameter (IMPP) minus earth radius of
    curvature (ELRC) minus geoid undulation (GEODU), cast to float32.
    """
    height = impp - elrc - geodu

    return height.astype(np.float32)
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+ arfrot = r.get('obsTypeBendingAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+ logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+ .write_attr('long_name', 'Percent Confidence') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
if __name__ == '__main__':

    t0 = time.time()

    # Command-line interface: a required JSON config plus a verbosity switch.
    ap = argparse.ArgumentParser()
    ap.add_argument('-c', '--config', type=str,
                    help='Input JSON configuration', required=True)
    ap.add_argument('-v', '--verbose',
                    help='print debug logging information',
                    action='store_true')
    cli_args = ap.parse_args()

    # Verbose flag raises logging from INFO to DEBUG.
    logger = Logger('bufr2ioda_gnssro.py',
                    level='DEBUG' if cli_args.verbose else 'INFO',
                    colored_log=True)

    # Load the JSON configuration that drives the conversion.
    with open(cli_args.config, "r") as jf:
        config = json.load(jf)

    bufr_to_ioda(config, logger)

    running_time = time.time() - t0
    logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_k5.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_k5.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_k5.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
def Derive_stationIdentification(said, ptid):
    """Build station identifiers from satellite and transmitter ids.

    Each identifier is the satellite id and the transmitter id, both
    zero-padded to 4 digits, concatenated into one 8-character string.

    Returns a masked string array whose fill value is the empty string.
    """
    idents = [str(said[k]).zfill(4) + str(ptid[k]).zfill(4)
              for k in range(len(said))]
    stid = ma.array(np.array(idents).astype(dtype='str'))
    ma.set_fill_value(stid, "")

    return stid
+
+
def Compute_Grid_Location(degrees):
    """Convert angles from degrees to radians, element-wise and in place.

    Values outside [-180, 360] (e.g. missing-data fills) are left
    untouched.  NOTE(review): the upper bound of 360 admits longitudes
    in the 0-360 convention, whose radian values exceed pi -- confirm
    against the downstream valid_range of [-pi, pi].
    """
    for k in range(len(degrees)):
        value = degrees[k]
        if -180 <= value <= 360:
            degrees[k] = np.deg2rad(value)

    return degrees
+
+
def Compute_imph(impp, elrc, geodu):
    """Return impact height as float32.

    Impact height = impact parameter minus earth radius of curvature
    minus geoid undulation.
    """
    height = impp - elrc - geodu

    return height.astype(np.float32)
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+ arfrot = r.get('obsTypeBendingAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+ logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+ .write_attr('long_name', 'Sensor Azimuth Angle') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+ logger = Logger('bufr2ioda_gnssro.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_metop.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_metop.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_metop.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+ arfrot = r.get('obsTypeAtmosphericRefractivity', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+ logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+ # Frequency
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+ .write_attr('long_name', 'Sensor Azimuth Angle') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+ logger = Logger('bufr2ioda_gnssro.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_paz.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_paz.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_paz.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+    arfrot = r.get('obsTypeAtmosphericRefractivity', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+    logger.debug(f"       arfroe      shape, type = {arfroe.shape}, {arfroe.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+            # Frequency
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+                .write_attr('long_name', 'Sensor Azimuth Angle') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+    logger = Logger('bufr2ioda_gnssro_paz.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_piq.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_piq.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_piq.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+    arfrot = r.get('obsTypeAtmosphericRefractivity', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+    logger.debug(f"     arfroe    shape, type = {arfroe.shape}, {arfroe.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+        # Mean Frequency
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+            .write_attr('long_name', 'Sensor Azimuth Angle') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+ logger = Logger('bufr2ioda_gnssro.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_s6.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_s6.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_s6.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
def Derive_stationIdentification(said, ptid):
    """Build per-observation station identifiers.

    Each identifier is the 4-digit zero-padded satellite id (SAID)
    concatenated with the 4-digit zero-padded transmitter id (PTID),
    e.g. said=750, ptid=3 -> "07500003".

    Returns a numpy masked string array whose fill value is the empty
    string, so missing entries serialize cleanly.
    """
    ids = [str(s).zfill(4) + str(p).zfill(4) for s, p in zip(said, ptid)]
    stid = ma.array(np.array(ids).astype(dtype='str'))
    ma.set_fill_value(stid, "")
    return stid
+
+
def Compute_Grid_Location(degrees):
    """Convert angle values from degrees to radians, in place.

    Entries outside [-180, 360] (e.g. BUFR missing-value fills) are left
    untouched.  NOTE(review): the upper bound of 360 admits longitudes
    whose radian value exceeds pi — confirm against the downstream
    valid_range attribute of [-pi, pi].

    Returns the same array object that was passed in.
    """
    for idx, val in enumerate(degrees):
        if -180 <= val <= 360:
            degrees[idx] = np.deg2rad(val)
    return degrees
+
+
def Compute_imph(impp, elrc, geodu):
    """Return impact height (float32): impact parameter minus earth
    radius of curvature minus geoid undulation, all in metres."""
    return (impp - elrc - geodu).astype(np.float32)
+
+
def bufr_to_ioda(config, logger):
    """Convert a GNSS-RO (GPS-RO, BUFR subset NC003010) dump file to IODA netCDF.

    The conversion has four stages:
      1. Build a bufr.QuerySet describing every BUFR mnemonic needed.
      2. Execute the QuerySet against the dump file to obtain numpy
         (masked) arrays.
      3. Derive variables: station identification, grid lat/lon in
         radians, impact height, a monotonically increasing profile
         sequence number, and flags unpacked from the QFRO bit field.
      4. Subset to the configured satellite mission and write one IODA
         ObsSpace file with MetaData, ObsValue, ObsError and ObsType
         groups.

    Args:
        config: dict loaded from the JSON configuration; must provide
            subsets, data_format, data_type, data_description,
            data_provider, cycle_type, cycle_datetime, dump_directory,
            ioda_directory, mission and satellite_info.
        logger: wxflow Logger for progress and debug messages.
    """
    subsets = config["subsets"]
    logger.debug(f"Checking subsets = {subsets}")

    # =========================================
    # Get parameters from configuration
    # =========================================
    data_format = config["data_format"]
    data_type = config["data_type"]
    ioda_data_type = "gnssro"
    data_description = config["data_description"]
    data_provider = config["data_provider"]
    cycle_type = config["cycle_type"]
    dump_dir = config["dump_directory"]
    ioda_dir = config["ioda_directory"]
    mission = config["mission"]
    satellite_info_array = config["satellite_info"]
    cycle = config["cycle_datetime"]
    yyyymmdd = cycle[0:8]
    hh = cycle[8:10]

    bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
    DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
                             'atmos', bufrfile)

    # ============================================
    # Make the QuerySet for all the data we want
    # ============================================
    start_time = time.time()

    logger.debug(f"Making QuerySet ...")
    q = bufr.QuerySet(subsets)

    # MetaData
    q.add('latitude', '*/ROSEQ1/CLATH')
    q.add('longitude', '*/ROSEQ1/CLONH')
    q.add('gridLatitude', '*/ROSEQ1/CLATH')
    q.add('gridLongitude', '*/ROSEQ1/CLONH')
    q.add('year', '*/YEAR')
    q.add('year2', '*/YEAR')
    q.add('month', '*/MNTH')
    q.add('day', '*/DAYS')
    q.add('hour', '*/HOUR')
    q.add('minute', '*/MINU')
    q.add('second', '*/SECO')
    q.add('satelliteIdentifier', '*/SAID')
    q.add('satelliteInstrument', '*/SIID')
    q.add('satelliteConstellationRO', '*/SCLF')
    q.add('satelliteTransmitterId', '*/PTID')
    q.add('earthRadiusCurvature', '*/ELRC')
    q.add('sequenceNumber', '*/SEQNUM')
    q.add('geoidUndulation', '*/GEODU')
    q.add('height', '*/ROSEQ3/HEIT')
    # Three ROSEQ2 replications: one per carrier frequency of the occultation.
    q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
    q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
    q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
    q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
    q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
    q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
    q.add('pccf', '*/PCCF[1]')
    q.add('percentConfidence', '*/ROSEQ3/PCCF')
    q.add('sensorAzimuthAngle', '*/BEARAZ')

    # Processing Center
    q.add('dataProviderOrigin', '*/OGCE')

    # Quality Information
    q.add('qualityFlags', '*/QFRO')
    q.add('qfro', '*/QFRO')
    q.add('satelliteAscendingFlag', '*/QFRO')

    # ObsValue
    q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
    q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
    q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
    q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')

    # ObsError
    q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
    q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
    q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
    q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')

    # ObsType (both ObsTypes are derived from the satellite id)
    q.add('obsTypeBendingAngle', '*/SAID')
    q.add('obsTypeAtmosphericRefractivity', '*/SAID')

    end_time = time.time()
    running_time = end_time - start_time
    logger.debug(f"Running time for making QuerySet: {running_time} seconds")

    # ==============================================================
    # Open the BUFR file and execute the QuerySet to get ResultSet
    # Use the ResultSet returned to get numpy arrays of the data
    # ==============================================================
    start_time = time.time()

    logger.debug(f"Executing QuerySet to get ResultSet ...")
    with bufr.File(DATA_PATH) as f:
        r = f.execute(q)

    logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
    # MetaData (second argument is the grouping dimension variable)
    clath = r.get('latitude', 'latitude')
    clonh = r.get('longitude', 'latitude')
    gclath = r.get('gridLatitude', 'latitude')
    gclonh = r.get('gridLongitude', 'latitude')
    year = r.get('year', 'latitude')
    # year2 is retrieved ungrouped; kept for parity with the QuerySet
    # although it is not used further below.
    year2 = r.get('year2')
    mnth = r.get('month', 'latitude')
    days = r.get('day', 'latitude')
    hour = r.get('hour', 'latitude')
    minu = r.get('minute', 'latitude')
    seco = r.get('second', 'latitude')
    said = r.get('satelliteIdentifier', 'latitude')
    siid = r.get('satelliteInstrument', 'latitude')
    sclf = r.get('satelliteConstellationRO', 'latitude')
    ptid = r.get('satelliteTransmitterId', 'latitude')
    elrc = r.get('earthRadiusCurvature', 'latitude')
    seqnum = r.get('sequenceNumber', 'latitude')
    geodu = r.get('geoidUndulation', 'latitude')
    heit = r.get('height', 'height', type='float32').astype(np.float32)
    impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
    impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
    impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')

    mefr1 = r.get('frequency__roseq2repl1', 'latitude',
                  type='float32').astype(np.float32)
    mefr2 = r.get('frequency__roseq2repl2', 'latitude',
                  type='float32').astype(np.float32)
    mefr3 = r.get('frequency__roseq2repl3', 'latitude',
                  type='float32').astype(np.float32)
    pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
    ref_pccf = r.get('percentConfidence', 'height')
    bearaz = r.get('sensorAzimuthAngle', 'latitude')

    logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
    # Processing Center
    ogce = r.get('dataProviderOrigin', 'latitude')

    logger.debug(f" ... Executing QuerySet: get metadata: data quality information ...")
    # Quality Information.  qfro2 is initialized from pccf only to obtain a
    # float masked array of the right shape/fill; it is overwritten below.
    qfro = r.get('qualityFlags', 'latitude')
    qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
    satasc = r.get('satelliteAscendingFlag', 'latitude')

    logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
    # ObsValue
    # Bending Angle
    bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
    bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
    bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
    arfr = r.get('atmosphericRefractivity', 'height')

    # ObsError
    # Bending Angle
    bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
    bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
    bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
    arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')

    # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000
    # and used repeatedly): renumber profiles monotonically from 1,
    # incrementing whenever the raw SEQNUM changes.
    logger.debug(f"Assign sequence number: starting from 1")

    count1 = 0
    count2 = 0
    seqnum2 = []
    for i in range(len(seqnum)):
        if (int(seqnum[i]) != count2):
            count1 += 1
            count2 = int(seqnum[i])
        seqnum2.append(count1)
    seqnum2 = np.array(seqnum2)

    logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, "
                 f"{seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")

    # ObsType
    bndaot = r.get('obsTypeBendingAngle', 'latitude')
    # BUGFIX: previously fetched from 'obsTypeBendingAngle'; both queries
    # map to */SAID so the values are identical, but this restores the
    # intended pairing with the query added above.
    arfrot = r.get('obsTypeAtmosphericRefractivity', 'latitude')

    logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
    # DateTime: seconds since Epoch time
    # IODA has no support for numpy datetime arrays dtype=datetime64[s]
    timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
                               'second', 'latitude').astype(np.int64)

    logger.debug(f" ... Executing QuerySet: Done!")

    logger.debug(f" ... Executing QuerySet: Check BUFR variable generic dimension and type ...")
    # Check BUFR variable generic dimension and type
    logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
    logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
    logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
    logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
    logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
    logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
    logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
    logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
    logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
    logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
    logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
    logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
    logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
    logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
    logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
    logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
    logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
    logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
    logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
    logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
    logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
    logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
    logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
    logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
    logger.debug(f" pccf shape, fill = {pccf.fill_value}")
    logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, {ref_pccf.dtype}")
    logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")

    logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")

    logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
    logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")

    logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
    logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
    logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")

    logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, {bndaoe1.dtype}")
    logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, {bndaoe3.dtype}")
    logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")

    logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")

    end_time = time.time()
    running_time = end_time - start_time
    logger.debug(f"Running time for executing QuerySet to get ResultSet: "
                 f"{running_time} seconds")

    # =========================
    # Create derived variables
    # =========================
    start_time = time.time()

    logger.debug(f"Creating derived variables - stationIdentification")
    stid = Derive_stationIdentification(said, ptid)

    logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")

    logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
    gclonh = Compute_Grid_Location(gclonh)
    gclath = Compute_Grid_Location(gclath)

    logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
    logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
    logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
    logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")

    logger.debug(f"Creating derived variables - imph ...")

    imph1 = Compute_imph(impp1, elrc, geodu)
    imph2 = Compute_imph(impp2, elrc, geodu)
    imph3 = Compute_imph(impp3, elrc, geodu)

    logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
    logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
    logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
    logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")

    # Collapse the three frequency replications into replication 1,
    # preferring whichever replication reports frequency 0.0.
    logger.debug(f"Keep bending angle with Freq = 0.0")
    for i in range(len(said)):
        if (mefr2[i] == 0.0):
            bnda1[i] = bnda2[i]
            mefr1[i] = mefr2[i]
            impp1[i] = impp2[i]
            imph1[i] = imph2[i]
            bndaoe1[i] = bndaoe2[i]
        if (mefr3[i] == 0.0):
            bnda1[i] = bnda3[i]
            mefr1[i] = mefr3[i]
            impp1[i] = impp3[i]
            imph1[i] = imph3[i]
            bndaoe1[i] = bndaoe3[i]

    logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, "
                 f"{bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
    logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, "
                 f"{mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
    logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, "
                 f"{mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
    logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, "
                 f"{mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
    logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, "
                 f"{impp1.dtype}, {impp1.min()}, {impp1.max()}")
    logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, "
                 f"{imph1.dtype}, {imph1.min()}, {imph1.max()}")
    logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, "
                 f"{bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")

    # find ibit for qfro (16bit from left to right)
    # bit3: 8192 mask; bit5 (2048): reject the bending angle obs;
    # bit6 (1024): reject the refractivity obs
    bit3 = []
    bit5 = []
    bit6 = []
    for quality in qfro:
        if quality & 8192 > 0:
            bit3.append(1)
        else:
            bit3.append(0)

        if quality & 2048 > 0:
            bit5.append(1)
        else:
            bit5.append(0)

        # For refractivity data use only:
        if quality & 1024 > 0:
            bit6.append(1)
        else:
            bit6.append(0)

    bit3 = np.array(bit3)
    bit5 = np.array(bit5)
    bit6 = np.array(bit6)
    logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, "
                 f"{bit3.dtype}, {bit3.min()}, {bit3.max()}")

    # overwrite satelliteAscendingFlag and QFRO
    for quality in range(len(bit3)):
        satasc[quality] = 0
        qfro2[quality] = 0.0
        if bit3[quality] == 1:
            satasc[quality] = 1
        # if (bit6[quality] == 1): refractivity data only
        #     qfro2[quality] = 1.0
        if (bit5[quality] == 1):
            qfro2[quality] = 1.0

    logger.debug(f" new satasc shape, type, min/max {satasc.shape}, "
                 f"{satasc.dtype}, {satasc.min()}, {satasc.max()}")
    logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, "
                 f"{qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
    end_time = time.time()
    running_time = end_time - start_time
    logger.debug(f"Running time for creating derived variables: {running_time} seconds")

    # =====================================
    # Create IODA ObsSpace
    # Write IODA output
    # =====================================

    # Find unique satellite identifiers in data to process
    mission_said = []
    for sensor_satellite_info in satellite_info_array:
        mission_said.append(float(sensor_satellite_info["satellite_id"]))
    mission_said = np.array(mission_said)

    unique_satids = np.unique(said)
    logger.debug(f" ... Number of Unique satellite identifiers: {len(unique_satids)}")
    logger.debug(f" ... Unique satellite identifiers: {unique_satids}")

    print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
    print(' ... Unique satellite identifiers: ', unique_satids)
    print(' ... mission_said: ', mission_said)

    print(' ... Loop through unique satellite identifier ... : ', unique_satids)

    nobs = 0
    for sat in unique_satids.tolist():
        print("Processing output for said: ", sat)
        start_time = time.time()

        # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
        matched = False
        for sensor_satellite_info in satellite_info_array:
            if (sensor_satellite_info["satellite_id"] == sat):

                matched = True
                sensor_id = sensor_satellite_info["sensor_id"]
                sensor_name = sensor_satellite_info["sensor_name"]
                sensor_full_name = sensor_satellite_info["sensor_full_name"]
                satellite_id = sensor_satellite_info["satellite_id"]
                satellite_name = sensor_satellite_info["satellite_name"]
                satellite_full_name = sensor_satellite_info["satellite_full_name"]

        if matched:

            print(' ... Split data for satellite mission ', mission)

            # Define a boolean mask to subset data from the original data
            # object.  NOTE(review): the mask selects ALL satellites of the
            # mission (not just `sat`) and the output filename below does
            # not include `sat`, so every matched iteration rewrites the
            # same whole-mission file — confirm this is intentional.
            mask = np.isin(said, mission_said)

            # MetaData
            clonh_sat = clonh[mask]
            clath_sat = clath[mask]
            gclonh_sat = gclonh[mask]
            gclath_sat = gclath[mask]
            timestamp_sat = timestamp[mask]
            stid_sat = stid[mask]
            said_sat = said[mask]
            siid_sat = siid[mask]
            sclf_sat = sclf[mask]
            ptid_sat = ptid[mask]
            elrc_sat = elrc[mask]
            seqnum2_sat = seqnum2[mask]
            geodu_sat = geodu[mask]
            heit_sat = heit[mask]
            impp1_sat = impp1[mask]
            imph1_sat = imph1[mask]
            mefr1_sat = mefr1[mask]
            pccf_sat = pccf[mask]
            ref_pccf_sat = ref_pccf[mask]
            bearaz_sat = bearaz[mask]

            # Processing Center
            ogce_sat = ogce[mask]

            # QC Info
            qfro_sat = qfro[mask]
            qfro2_sat = qfro2[mask]
            satasc_sat = satasc[mask]

            # ObsValue
            bnda1_sat = bnda1[mask]
            arfr_sat = arfr[mask]

            # ObsError
            bndaoe1_sat = bndaoe1[mask]
            arfroe_sat = arfroe[mask]

            # ObsType
            bndaot_sat = bndaot[mask]
            arfrot_sat = arfrot[mask]

            nobs = clath_sat.shape[0]
            print(' ... Create ObsSpace for satid = ', sat)
            print(' ... size location of sat mission = ', nobs)

            # =====================================
            # Create IODA ObsSpace
            # Write IODA output
            # =====================================

            # Create the dimensions
            if nobs > 0:
                dims = {'Location': np.arange(0, nobs)}
                print(' ... dim = ', nobs)
            else:
                dims = {'Location': nobs}
                print(' ... dim = ', nobs)

            iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"

            OUTPUT_PATH = os.path.join(ioda_dir, iodafile)

            print(' ... ... Create OUTPUT file:', OUTPUT_PATH)

            path, fname = os.path.split(OUTPUT_PATH)
            if path and not os.path.exists(path):
                os.makedirs(path)

            # Create IODA ObsSpace
            obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)

            # Create Global attributes
            logger.debug(f" ... ... Create global attributes")
            obsspace.write_attr('source_file', bufrfile)
            obsspace.write_attr('dataOriginalFormatSpec', data_format)
            obsspace.write_attr('data_type', data_type)
            obsspace.write_attr('subsets', subsets)
            obsspace.write_attr('cycle_type', cycle_type)
            obsspace.write_attr('cycle_datetime', cycle)
            obsspace.write_attr('dataProviderOrigin', data_provider)
            obsspace.write_attr('data_description', data_description)
            obsspace.write_attr('converter', os.path.basename(__file__))

            if nobs > 0:
                # Create IODA variables
                logger.debug(f" ... ... Create variables: name, type, units, & attributes")
                # Longitude
                obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
                                    fillval=clonh_sat.fill_value) \
                    .write_attr('units', 'degrees_east') \
                    .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
                    .write_attr('long_name', 'Longitude') \
                    .write_data(clonh_sat)

                # Latitude
                obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
                                    fillval=clath_sat.fill_value) \
                    .write_attr('units', 'degrees_north') \
                    .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
                    .write_attr('long_name', 'Latitude') \
                    .write_data(clath_sat)

                # Grid Longitude
                obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
                                    fillval=gclonh_sat.fill_value) \
                    .write_attr('units', 'radians') \
                    .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
                                dtype=np.float32)) \
                    .write_attr('long_name', 'Grid Longitude') \
                    .write_data(gclonh_sat)

                # Grid Latitude
                obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
                                    fillval=gclath_sat.fill_value) \
                    .write_attr('units', 'radians') \
                    .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
                                dtype=np.float32)) \
                    .write_attr('long_name', 'Grid Latitude') \
                    .write_data(gclath_sat)

                # Datetime
                obsspace.create_var('MetaData/dateTime', dtype=np.int64,
                                    fillval=timestamp_sat.fill_value) \
                    .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
                    .write_attr('long_name', 'Datetime') \
                    .write_data(timestamp_sat)

                # Station Identification
                obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
                                    fillval=stid_sat.fill_value) \
                    .write_attr('long_name', 'Station Identification') \
                    .write_data(stid_sat)

                # Satellite Identifier
                obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
                                    fillval=said_sat.fill_value) \
                    .write_attr('long_name', 'Satellite Identifier') \
                    .write_data(said_sat)

                # Satellite Instrument
                obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
                                    fillval=siid_sat.fill_value) \
                    .write_attr('long_name', 'Satellite Instrument') \
                    .write_data(siid_sat)

                # Satellite Constellation RO
                obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
                                    fillval=sclf_sat.fill_value) \
                    .write_attr('long_name', 'Satellite Constellation RO') \
                    .write_data(sclf_sat)

                # Satellite Transmitter ID
                obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
                                    fillval=ptid_sat.fill_value) \
                    .write_attr('long_name', 'Satellite Transmitter Id') \
                    .write_data(ptid_sat)

                # Earth Radius Curvature
                obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
                                    fillval=elrc_sat.fill_value) \
                    .write_attr('units', 'm') \
                    .write_attr('long_name', 'Earth Radius of Curvature') \
                    .write_data(elrc_sat)

                # Sequence Number
                # NOTE(review): seqnum2_sat is a plain ndarray with no
                # fill_value, hence the borrowed said_sat.fill_value here
                # — confirm this is the intended fill.
                obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
                                    fillval=said_sat.fill_value) \
                    .write_attr('long_name', 'Sequence Number') \
                    .write_data(seqnum2_sat)

                # Geoid Undulation
                obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
                                    fillval=geodu_sat.fill_value) \
                    .write_attr('units', 'm') \
                    .write_attr('long_name', 'Geoid Undulation') \
                    .write_data(geodu_sat)

                # Height
                obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
                                    fillval=heit_sat.fill_value) \
                    .write_attr('units', 'm') \
                    .write_attr('long_name', 'Height for Atm Refractivity') \
                    .write_data(heit_sat)

                # Impact Parameter RO
                obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
                                    fillval=impp1_sat.fill_value) \
                    .write_attr('units', 'm') \
                    .write_attr('long_name', 'Impact Parameter Bending Angle') \
                    .write_data(impp1_sat)

                # Impact Height RO
                obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
                                    fillval=imph1_sat.fill_value) \
                    .write_attr('units', 'm') \
                    .write_attr('long_name', 'Impact Height Bending Angle') \
                    .write_data(imph1_sat)

                # Frequency (comment fixed: this is the carrier frequency,
                # not impact height)
                obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
                                    fillval=mefr1_sat.fill_value) \
                    .write_attr('units', 'Hz') \
                    .write_attr('long_name', 'Frequency') \
                    .write_data(mefr1_sat)

                # PCCF Percent Confidence
                obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
                                    fillval=pccf_sat.fill_value) \
                    .write_attr('units', '%') \
                    .write_attr('long_name', 'Profile Percent Confidence') \
                    .write_data(pccf_sat)

                # PCCF Ref Percent Confidence
                obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
                                    fillval=ref_pccf_sat.fill_value) \
                    .write_attr('units', '%') \
                    .write_attr('long_name', 'Ref Percent Confidence') \
                    .write_data(ref_pccf_sat)

                # Azimuth Angle
                # BUGFIX: long_name was 'Percent Confidence' (copy-paste);
                # corrected to describe the azimuth angle.
                obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
                                    fillval=bearaz_sat.fill_value) \
                    .write_attr('units', 'degree') \
                    .write_attr('long_name', 'Sensor Azimuth Angle') \
                    .write_data(bearaz_sat)

                # Data Provider
                obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
                                    fillval=ogce_sat.fill_value) \
                    .write_attr('long_name', 'Identification of Originating Center') \
                    .write_data(ogce_sat)

                # Quality: Quality Flags
                obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
                                    fillval=qfro_sat.fill_value) \
                    .write_attr('long_name', 'QFRO') \
                    .write_data(qfro_sat)

                obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
                                    fillval=qfro2_sat.fill_value) \
                    .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
                    .write_data(qfro2_sat)

                # Quality: Satellite Ascending Flag
                obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
                                    fillval=satasc_sat.fill_value) \
                    .write_attr('long_name', 'Satellite Ascending Flag') \
                    .write_data(satasc_sat)

                # ObsValue: Bending Angle
                obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
                                    fillval=bnda1_sat.fill_value) \
                    .write_attr('units', 'radians') \
                    .write_attr('long_name', 'Bending Angle') \
                    .write_data(bnda1_sat)

                # ObsValue: Atmospheric Refractivity
                obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
                                    fillval=arfr_sat.fill_value) \
                    .write_attr('units', 'N-units') \
                    .write_attr('long_name', 'Atmospheric Refractivity') \
                    .write_data(arfr_sat)

                # ObsError: Bending Angle
                obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
                                    fillval=bndaoe1_sat.fill_value) \
                    .write_attr('units', 'radians') \
                    .write_attr('long_name', 'Bending Angle Obs Error') \
                    .write_data(bndaoe1_sat)

                # ObsError: Atmospheric Refractivity
                obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
                                    fillval=arfroe_sat.fill_value) \
                    .write_attr('units', 'N-units') \
                    .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
                    .write_data(arfroe_sat)

                # ObsType: Bending Angle
                obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
                                    fillval=bndaot_sat.fill_value) \
                    .write_attr('long_name', 'Bending Angle ObsType') \
                    .write_data(bndaot_sat)

                # ObsType: Atmospheric Refractivity
                obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
                                    fillval=arfrot_sat.fill_value) \
                    .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
                    .write_data(arfrot_sat)

            end_time = time.time()
            running_time = end_time - start_time
            logger.debug(f"Running time for splitting and output IODA for gnssro bufr: "
                         f"{running_time} seconds")

    logger.debug("All Done!")
+
+
if __name__ == '__main__':

    start_time = time.time()

    # Command-line interface: -c/--config is the required path to the JSON
    # configuration; -v/--verbose switches the logger to DEBUG level.
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', type=str,
                        help='Input JSON configuration', required=True)
    parser.add_argument('-v', '--verbose',
                        help='print debug logging information',
                        action='store_true')
    args = parser.parse_args()

    log_level = 'DEBUG' if args.verbose else 'INFO'
    logger = Logger('bufr2ioda_gnssro.py', level=log_level,
                    colored_log=True)

    # Load the converter configuration and run the BUFR -> IODA conversion.
    with open(args.config, "r") as json_file:
        config = json.load(json_file)

    bufr_to_ioda(config, logger)

    # Total wall-clock time, reported at DEBUG level only.
    end_time = time.time()
    running_time = end_time - start_time
    logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_spire.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_spire.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_spire.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+ arfrot = r.get('obsTypeBendingAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+ logger.debug(f" arfroe shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# Decode QFRO quality bits (16 bits, counted from the left):
+# bit5=1: reject the bending angle obs
+# bit6=1: reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # Applies to refractivity data only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# Overwrite satelliteAscendingFlag (from bit 3) and qualityFlags (from bit 5)
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+ # Frequency
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+ .write_attr('long_name', 'Percent Confidence') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+ logger = Logger('bufr2ioda_gnssro.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_tdm.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_tdm.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_tdm.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+    arfrot = r.get('obsTypeAtmosphericRefractivity', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+    logger.debug(f"  arfroe shape, type = {arfroe.shape}, {arfroe.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+    # Frequency
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+    .write_attr('long_name', 'Sensor Azimuth Angle') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+ logger = Logger('bufr2ioda_gnssro.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gnssro_tsx.py b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_tsx.py
new file mode 100755
index 000000000..e9addc978
--- /dev/null
+++ b/ush/ioda/bufr2ioda/bufr2ioda_gnssro_tsx.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+
+import sys
+import os
+import argparse
+import json
+import numpy as np
+import numpy.ma as ma
+import math
+import calendar
+import time
+import datetime
+from pyiodaconv import bufr
+from collections import namedtuple
+from pyioda import ioda_obs_space as ioda_ospace
+from wxflow import Logger
+
+# ====================================================================
+# GPS-RO BUFR dump file
+# =====================================================================
+# NC003010 | GPS-RO
+# ====================================================================
+
+
+def Derive_stationIdentification(said, ptid):
+
+ stid = []
+ for i in range(len(said)):
+ newval = str(said[i]).zfill(4)+str(ptid[i]).zfill(4)
+ stid.append(str(newval))
+ stid = np.array(stid).astype(dtype='str')
+ stid = ma.array(stid)
+ ma.set_fill_value(stid, "")
+
+ return stid
+
+
+def Compute_Grid_Location(degrees):
+
+ for i in range(len(degrees)):
+ if degrees[i] <= 360 and degrees[i] >= -180:
+ degrees[i] = np.deg2rad(degrees[i])
+ rad = degrees
+
+ return rad
+
+
+def Compute_imph(impp, elrc, geodu):
+
+ imph = (impp - elrc - geodu).astype(np.float32)
+
+ return imph
+
+
+def bufr_to_ioda(config, logger):
+
+ subsets = config["subsets"]
+ logger.debug(f"Checking subsets = {subsets}")
+
+ # =========================================
+ # Get parameters from configuration
+ # =========================================
+ data_format = config["data_format"]
+ data_type = config["data_type"]
+ ioda_data_type = "gnssro"
+ data_description = config["data_description"]
+ data_provider = config["data_provider"]
+ cycle_type = config["cycle_type"]
+ dump_dir = config["dump_directory"]
+ ioda_dir = config["ioda_directory"]
+ mission = config["mission"]
+ satellite_info_array = config["satellite_info"]
+ cycle = config["cycle_datetime"]
+ yyyymmdd = cycle[0:8]
+ hh = cycle[8:10]
+
+ bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}"
+ DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh),
+ 'atmos', bufrfile)
+
+ # ============================================
+ # Make the QuerySet for all the data we want
+ # ============================================
+ start_time = time.time()
+
+ logger.debug(f"Making QuerySet ...")
+ q = bufr.QuerySet(subsets)
+
+ # MetaData
+ q.add('latitude', '*/ROSEQ1/CLATH')
+ q.add('longitude', '*/ROSEQ1/CLONH')
+ q.add('gridLatitude', '*/ROSEQ1/CLATH')
+ q.add('gridLongitude', '*/ROSEQ1/CLONH')
+ q.add('year', '*/YEAR')
+ q.add('year2', '*/YEAR')
+ q.add('month', '*/MNTH')
+ q.add('day', '*/DAYS')
+ q.add('hour', '*/HOUR')
+ q.add('minute', '*/MINU')
+ q.add('second', '*/SECO')
+ q.add('satelliteIdentifier', '*/SAID')
+ q.add('satelliteInstrument', '*/SIID')
+ q.add('satelliteConstellationRO', '*/SCLF')
+ q.add('satelliteTransmitterId', '*/PTID')
+ q.add('earthRadiusCurvature', '*/ELRC')
+ q.add('sequenceNumber', '*/SEQNUM')
+ q.add('geoidUndulation', '*/GEODU')
+ q.add('height', '*/ROSEQ3/HEIT')
+ q.add('impactParameterRO_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/IMPP')
+ q.add('impactParameterRO_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/IMPP')
+ q.add('impactParameterRO_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/IMPP')
+ q.add('frequency__roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/MEFR')
+ q.add('frequency__roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/MEFR')
+ q.add('frequency__roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/MEFR')
+ q.add('pccf', '*/PCCF[1]')
+ q.add('percentConfidence', '*/ROSEQ3/PCCF')
+ q.add('sensorAzimuthAngle', '*/BEARAZ')
+
+ # Processing Center
+ q.add('dataProviderOrigin', '*/OGCE')
+
+ # Quality Information
+ q.add('qualityFlags', '*/QFRO')
+ q.add('qfro', '*/QFRO')
+ q.add('satelliteAscendingFlag', '*/QFRO')
+
+ # ObsValue
+ q.add('bendingAngle_roseq2repl1', '*/ROSEQ1/ROSEQ2{1}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl2', '*/ROSEQ1/ROSEQ2{2}/BNDA[1]')
+ q.add('bendingAngle_roseq2repl3', '*/ROSEQ1/ROSEQ2{3}/BNDA[1]')
+ q.add('atmosphericRefractivity', '*/ROSEQ3/ARFR[1]')
+
+ # ObsError
+ q.add('obsErrorBendingAngle1', '*/ROSEQ1/ROSEQ2{1}/BNDA[2]')
+ q.add('obsErrorBendingAngle2', '*/ROSEQ1/ROSEQ2{2}/BNDA[2]')
+ q.add('obsErrorBendingAngle3', '*/ROSEQ1/ROSEQ2{3}/BNDA[2]')
+ q.add('obsErrorAtmosphericRefractivity', '*/ROSEQ3/ARFR[2]')
+
+ # ObsType
+ q.add('obsTypeBendingAngle', '*/SAID')
+ q.add('obsTypeAtmosphericRefractivity', '*/SAID')
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for making QuerySet: {running_time} seconds")
+
+ # ==============================================================
+ # Open the BUFR file and execute the QuerySet to get ResultSet
+ # Use the ResultSet returned to get numpy arrays of the data
+ # ==============================================================
+ start_time = time.time()
+
+ logger.debug(f"Executing QuerySet to get ResultSet ...")
+ with bufr.File(DATA_PATH) as f:
+ r = f.execute(q)
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: basic ...")
+ # MetaData
+ clath = r.get('latitude', 'latitude')
+ clonh = r.get('longitude', 'latitude')
+ gclath = r.get('gridLatitude', 'latitude')
+ gclonh = r.get('gridLongitude', 'latitude')
+ year = r.get('year', 'latitude')
+ year2 = r.get('year2')
+ mnth = r.get('month', 'latitude')
+ days = r.get('day', 'latitude')
+ hour = r.get('hour', 'latitude')
+ minu = r.get('minute', 'latitude')
+ seco = r.get('second', 'latitude')
+ said = r.get('satelliteIdentifier', 'latitude')
+ siid = r.get('satelliteInstrument', 'latitude')
+ sclf = r.get('satelliteConstellationRO', 'latitude')
+ ptid = r.get('satelliteTransmitterId', 'latitude')
+ elrc = r.get('earthRadiusCurvature', 'latitude')
+ seqnum = r.get('sequenceNumber', 'latitude')
+ geodu = r.get('geoidUndulation', 'latitude')
+ heit = r.get('height', 'height', type='float32').astype(np.float32)
+ impp1 = r.get('impactParameterRO_roseq2repl1', 'latitude')
+ impp2 = r.get('impactParameterRO_roseq2repl2', 'latitude')
+ impp3 = r.get('impactParameterRO_roseq2repl3', 'latitude')
+
+ mefr1 = r.get('frequency__roseq2repl1', 'latitude',
+ type='float32').astype(np.float32)
+ mefr2 = r.get('frequency__roseq2repl2', 'latitude',
+ type='float32').astype(np.float32)
+ mefr3 = r.get('frequency__roseq2repl3', 'latitude',
+ type='float32').astype(np.float32)
+ pccf = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ ref_pccf = r.get('percentConfidence', 'height')
+ bearaz = r.get('sensorAzimuthAngle', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get MetaData: processing center...")
+ # Processing Center
+ ogce = r.get('dataProviderOrigin', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get metadata: data quality \
+ information ...")
+ # Quality Information
+ qfro = r.get('qualityFlags', 'latitude')
+ qfro2 = r.get('pccf', 'latitude', type='float32').astype(np.float32)
+ satasc = r.get('satelliteAscendingFlag', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get ObsValue: Bending Angle ...")
+ # ObsValue
+ # Bending Angle
+ bnda1 = r.get('bendingAngle_roseq2repl1', 'latitude')
+ bnda2 = r.get('bendingAngle_roseq2repl2', 'latitude')
+ bnda3 = r.get('bendingAngle_roseq2repl3', 'latitude')
+ arfr = r.get('atmosphericRefractivity', 'height')
+
+ # ObsError
+ # Bending Angle
+ bndaoe1 = r.get('obsErrorBendingAngle1', 'latitude')
+ bndaoe2 = r.get('obsErrorBendingAngle2', 'latitude')
+ bndaoe3 = r.get('obsErrorBendingAngle3', 'latitude')
+ arfroe = r.get('obsErrorAtmosphericRefractivity', 'height')
+
+ # assign sequenceNumber (SEQNUM in the bufr table is less than 1,000 and used repeatedly)
+ logger.debug(f"Assign sequence number: starting from 1")
+
+ count1 = 0
+ count2 = 0
+ seqnum2 = []
+ for i in range(len(seqnum)):
+ if (int(seqnum[i]) != count2):
+ count1 += 1
+ count2 = int(seqnum[i])
+ seqnum2.append(count1)
+ seqnum2 = np.array(seqnum2)
+
+ logger.debug(f" new seqnum2 shape, type, min/max {seqnum2.shape}, \
+ {seqnum2.dtype}, {seqnum2.min()}, {seqnum2.max()}")
+
+ # ObsType
+ # Bending Angle
+ bndaot = r.get('obsTypeBendingAngle', 'latitude')
+    arfrot = r.get('obsTypeAtmosphericRefractivity', 'latitude')
+
+ logger.debug(f" ... Executing QuerySet: get datatime: observation time ...")
+ # DateTime: seconds since Epoch time
+ # IODA has no support for numpy datetime arrays dtype=datetime64[s]
+ timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute',
+ 'second', 'latitude').astype(np.int64)
+
+ logger.debug(f" ... Executing QuerySet: Done!")
+
+ logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \
+ dimension and type ...")
+ # Check BUFR variable generic dimension and type
+ logger.debug(f" clath shape, type = {clath.shape}, {clath.dtype}")
+ logger.debug(f" clonh shape, type = {clonh.shape}, {clonh.dtype}")
+ logger.debug(f" gclath shape, type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh shape, type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" year shape, type = {year.shape}, {year.dtype}")
+ logger.debug(f" mnth shape, type = {mnth.shape}, {mnth.dtype}")
+ logger.debug(f" days shape, type = {days.shape}, {days.dtype}")
+ logger.debug(f" hour shape, type = {hour.shape}, {hour.dtype}")
+ logger.debug(f" minu shape, type = {minu.shape}, {minu.dtype}")
+ logger.debug(f" seco shape, type = {seco.shape}, {seco.dtype}")
+ logger.debug(f" said shape, type = {said.shape}, {said.dtype}")
+ logger.debug(f" siid shape, type = {siid.shape}, {siid.dtype}")
+ logger.debug(f" sclf shape, type = {sclf.shape}, {sclf.dtype}")
+ logger.debug(f" ptid shape, type = {ptid.shape}, {ptid.dtype}")
+ logger.debug(f" elrc shape, type = {elrc.shape}, {elrc.dtype}")
+ logger.debug(f" seqnum shape, type = {seqnum.shape}, {seqnum.dtype}")
+ logger.debug(f" geodu shape, type = {geodu.shape}, {geodu.dtype}")
+ logger.debug(f" heit shape, type = {heit.shape}, {heit.dtype}")
+ logger.debug(f" impp1 shape, type = {impp1.shape}, {impp1.dtype}")
+ logger.debug(f" impp2 shape, type = {impp2.shape}, {impp2.dtype}")
+ logger.debug(f" impp3 shape, type = {impp3.shape}, {impp3.dtype}")
+ logger.debug(f" mefr1 shape, type = {mefr1.shape}, {mefr1.dtype}")
+ logger.debug(f" mefr3 shape, type = {mefr3.shape}, {mefr3.dtype}")
+ logger.debug(f" pccf shape, type = {pccf.shape}, {pccf.dtype}")
+ logger.debug(f" pccf shape, fill = {pccf.fill_value}")
+ logger.debug(f" ref_pccf shape, type = {ref_pccf.shape}, \
+ {ref_pccf.dtype}")
+ logger.debug(f" bearaz shape, type = {bearaz.shape}, {bearaz.dtype}")
+
+ logger.debug(f" ogce shape, type = {ogce.shape}, {ogce.dtype}")
+
+ logger.debug(f" qfro shape, type = {qfro.shape}, {qfro.dtype}")
+ logger.debug(f" satasc shape, type = {satasc.shape}, {satasc.dtype}")
+
+ logger.debug(f" bnda1 shape, type = {bnda1.shape}, {bnda1.dtype}")
+ logger.debug(f" bnda3 shape, type = {bnda3.shape}, {bnda3.dtype}")
+ logger.debug(f" arfr shape, type = {arfr.shape}, {arfr.dtype}")
+
+ logger.debug(f" bndaoe1 shape, type = {bndaoe1.shape}, \
+ {bndaoe1.dtype}")
+ logger.debug(f" bndaoe3 shape, type = {bndaoe3.shape}, \
+ {bndaoe3.dtype}")
+    logger.debug(f"      arfroe      shape, type = {arfroe.shape}, {arfroe.dtype}")
+
+ logger.debug(f" bndaot shape, type = {bndaot.shape}, {bndaot.dtype}")
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for executing QuerySet to get ResultSet: \
+ {running_time} seconds")
+
+ # =========================
+ # Create derived variables
+ # =========================
+ start_time = time.time()
+
+ logger.debug(f"Creating derived variables - stationIdentification")
+ stid = Derive_stationIdentification(said, ptid)
+
+ logger.debug(f" stid shape,type = {stid.shape}, {stid.dtype}")
+
+ logger.debug(f"Creating derived variables - Grid Latitude / Longitude ...")
+ gclonh = Compute_Grid_Location(gclonh)
+ gclath = Compute_Grid_Location(gclath)
+
+ logger.debug(f" gclonh shape,type = {gclonh.shape}, {gclonh.dtype}")
+ logger.debug(f" gclath shape,type = {gclath.shape}, {gclath.dtype}")
+ logger.debug(f" gclonh min/max = {gclonh.min()}, {gclonh.max()}")
+ logger.debug(f" gclath min/max = {gclath.min()}, {gclath.max()}")
+
+ logger.debug(f"Creating derived variables - imph ...")
+
+ imph1 = Compute_imph(impp1, elrc, geodu)
+ imph2 = Compute_imph(impp2, elrc, geodu)
+ imph3 = Compute_imph(impp3, elrc, geodu)
+
+ logger.debug(f" imph1 shape,type = {imph1.shape}, {imph1.dtype}")
+ logger.debug(f" imph3 shape,type = {imph3.shape}, {imph3.dtype}")
+ logger.debug(f" imph1 min/max = {imph1.min()}, {imph1.max()}")
+ logger.debug(f" imph3 min/max = {imph3.min()}, {imph3.max()}")
+
+ logger.debug(f"Keep bending angle with Freq = 0.0")
+ for i in range(len(said)):
+ if (mefr2[i] == 0.0):
+ bnda1[i] = bnda2[i]
+ mefr1[i] = mefr2[i]
+ impp1[i] = impp2[i]
+ imph1[i] = imph2[i]
+ bndaoe1[i] = bndaoe2[i]
+ if (mefr3[i] == 0.0):
+ bnda1[i] = bnda3[i]
+ mefr1[i] = mefr3[i]
+ impp1[i] = impp3[i]
+ imph1[i] = imph3[i]
+ bndaoe1[i] = bndaoe3[i]
+
+ logger.debug(f" new bnda1 shape, type, min/max {bnda1.shape}, \
+ {bnda1.dtype}, {bnda1.min()}, {bnda1.max()}")
+ logger.debug(f" new mefr1 shape, type, min/max {mefr1.shape}, \
+ {mefr1.dtype}, {mefr1.min()}, {mefr1.max()}")
+ logger.debug(f" mefr2 shape, type, min/max {mefr2.shape}, \
+ {mefr2.dtype}, {mefr2.min()}, {mefr2.max()}")
+ logger.debug(f" mefr3 shape, type, min/max {mefr3.shape}, \
+ {mefr3.dtype}, {mefr3.min()}, {mefr3.max()}")
+ logger.debug(f" new impp1 shape, type, min/max {impp1.shape}, \
+ {impp1.dtype}, {impp1.min()}, {impp1.max()}")
+ logger.debug(f" new imph1 shape, type, min/max {imph1.shape}, \
+ {imph1.dtype}, {imph1.min()}, {imph1.max()}")
+ logger.debug(f" new bndaoe1 shape, type, min/max {bndaoe1.shape}, \
+ {bndaoe1.dtype}, {bndaoe1.min()}, {bndaoe1.max()}")
+
+# find ibit for qfro (16bit from left to right)
+# bit5=1, reject the bending angle obs
+# bit6=1, reject the refractivity obs
+ bit3 = []
+ bit5 = []
+ bit6 = []
+ for quality in qfro:
+ if quality & 8192 > 0:
+ bit3.append(1)
+ else:
+ bit3.append(0)
+
+ if quality & 2048 > 0:
+ bit5.append(1)
+ else:
+ bit5.append(0)
+
+ # For refractivity data use only:
+ if quality & 1024 > 0:
+ bit6.append(1)
+ else:
+ bit6.append(0)
+
+ bit3 = np.array(bit3)
+ bit5 = np.array(bit5)
+ bit6 = np.array(bit6)
+ logger.debug(f" new bit3 shape, type, min/max {bit3.shape}, \
+ {bit3.dtype}, {bit3.min()}, {bit3.max()}")
+
+# overwrite satelliteAscendingFlag and QFRO
+ for quality in range(len(bit3)):
+ satasc[quality] = 0
+ qfro2[quality] = 0.0
+ if bit3[quality] == 1:
+ satasc[quality] = 1
+ # if (bit6[quality] == 1): refractivity data only
+ # qfro2[quality] = 1.0
+ if (bit5[quality] == 1):
+ qfro2[quality] = 1.0
+
+ logger.debug(f" new satasc shape, type, min/max {satasc.shape}, \
+ {satasc.dtype}, {satasc.min()}, {satasc.max()}")
+ logger.debug(f" new qfro2 shape, type, min/max {qfro2.shape}, \
+ {qfro2.dtype}, {qfro2.min()}, {qfro2.max()}, {qfro2.fill_value}")
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for creating derived variables: {running_time} \
+ seconds")
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Find unique satellite identifiers in data to process
+ mission_said = []
+ for sensor_satellite_info in satellite_info_array:
+ mission_said.append(float(sensor_satellite_info["satellite_id"]))
+ mission_said = np.array(mission_said)
+
+ unique_satids = np.unique(said)
+ logger.debug(f" ... Number of Unique satellite identifiers: \
+ {len(unique_satids)}")
+ logger.debug(f" ... Unique satellite identifiers: {unique_satids}")
+
+ print(' ... Number of Unique satellite identifiers: ', len(unique_satids))
+ print(' ... Unique satellite identifiers: ', unique_satids)
+ print(' ... mission_said: ', mission_said)
+
+ print(' ... Loop through unique satellite identifier ... : ', unique_satids)
+
+ nobs = 0
+ for sat in unique_satids.tolist():
+ print("Processing output for said: ", sat)
+ start_time = time.time()
+
+ # Find matched sensor_satellite_info from sensor_satellite_info namedtuple
+ matched = False
+ for sensor_satellite_info in satellite_info_array:
+ if (sensor_satellite_info["satellite_id"] == sat):
+
+ matched = True
+ sensor_id = sensor_satellite_info["sensor_id"]
+ sensor_name = sensor_satellite_info["sensor_name"]
+ sensor_full_name = sensor_satellite_info["sensor_full_name"]
+ satellite_id = sensor_satellite_info["satellite_id"]
+ satellite_name = sensor_satellite_info["satellite_name"]
+ satellite_full_name = sensor_satellite_info["satellite_full_name"]
+
+ if matched:
+
+ print(' ... Split data for satellite mission ', mission)
+
+ # Define a boolean mask to subset data from the original data object
+ mask = np.isin(said, mission_said)
+
+ # MetaData
+ clonh_sat = clonh[mask]
+ clath_sat = clath[mask]
+ gclonh_sat = gclonh[mask]
+ gclath_sat = gclath[mask]
+ timestamp_sat = timestamp[mask]
+ stid_sat = stid[mask]
+ said_sat = said[mask]
+ siid_sat = siid[mask]
+ sclf_sat = sclf[mask]
+ ptid_sat = ptid[mask]
+ elrc_sat = elrc[mask]
+ seqnum2_sat = seqnum2[mask]
+ geodu_sat = geodu[mask]
+ heit_sat = heit[mask]
+ impp1_sat = impp1[mask]
+ imph1_sat = imph1[mask]
+ mefr1_sat = mefr1[mask]
+ pccf_sat = pccf[mask]
+ ref_pccf_sat = ref_pccf[mask]
+ bearaz_sat = bearaz[mask]
+ ogce_sat = ogce[mask]
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ # Processing Center
+ ogce_sat = ogce[mask]
+
+ # QC Info
+ qfro_sat = qfro[mask]
+ qfro2_sat = qfro2[mask]
+ satasc_sat = satasc[mask]
+
+ # ObsValue
+ bnda1_sat = bnda1[mask]
+ arfr_sat = arfr[mask]
+
+ # ObsError
+ bndaoe1_sat = bndaoe1[mask]
+ arfroe_sat = arfroe[mask]
+
+ # ObsType
+ bndaot_sat = bndaot[mask]
+ arfrot_sat = arfrot[mask]
+
+ nobs = clath_sat.shape[0]
+ print(' ... Create ObsSpace for satid = ', sat)
+ print(' ... size location of sat mission = ', nobs)
+
+ # =====================================
+ # Create IODA ObsSpace
+ # Write IODA output
+ # =====================================
+
+ # Create the dimensions
+ if nobs > 0:
+ dims = {'Location': np.arange(0, nobs)}
+ print(' ... dim = ', nobs)
+ else:
+ dims = {'Location': nobs}
+ print(' ... dim = ', nobs)
+
+ iodafile = f"{cycle_type}.t{hh}z.{ioda_data_type}_{mission}.tm00.nc"
+
+ OUTPUT_PATH = os.path.join(ioda_dir, iodafile)
+
+ print(' ... ... Create OUTPUT file:', OUTPUT_PATH)
+
+ path, fname = os.path.split(OUTPUT_PATH)
+ if path and not os.path.exists(path):
+ os.makedirs(path)
+
+ # Create IODA ObsSpace
+ obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims)
+
+ # Create Global attributes
+ logger.debug(f" ... ... Create global attributes")
+ obsspace.write_attr('source_file', bufrfile)
+ obsspace.write_attr('dataOriginalFormatSpec', data_format)
+ obsspace.write_attr('data_type', data_type)
+ obsspace.write_attr('subsets', subsets)
+ obsspace.write_attr('cycle_type', cycle_type)
+ obsspace.write_attr('cycle_datetime', cycle)
+ obsspace.write_attr('dataProviderOrigin', data_provider)
+ obsspace.write_attr('data_description', data_description)
+ obsspace.write_attr('converter', os.path.basename(__file__))
+
+ if nobs > 0:
+ # Create IODA variables
+ logger.debug(f" ... ... Create variables: name, type, units, & attributes")
+ # Longitude
+ obsspace.create_var('MetaData/longitude', dtype=clonh_sat.dtype,
+ fillval=clonh_sat.fill_value) \
+ .write_attr('units', 'degrees_east') \
+ .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \
+ .write_attr('long_name', 'Longitude') \
+ .write_data(clonh_sat)
+
+ # Latitude
+ obsspace.create_var('MetaData/latitude', dtype=clath_sat.dtype,
+ fillval=clath_sat.fill_value) \
+ .write_attr('units', 'degrees_north') \
+ .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \
+ .write_attr('long_name', 'Latitude') \
+ .write_data(clath_sat)
+
+ # Grid Longitude
+ obsspace.create_var('MetaData/gridLongitude', dtype=gclonh_sat.dtype,
+ fillval=gclonh_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-3.14159265, 3.14159265],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Longitude') \
+ .write_data(gclonh_sat)
+
+ # Grid Latitude
+ obsspace.create_var('MetaData/gridLatitude', dtype=gclath_sat.dtype,
+ fillval=gclath_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('valid_range', np.array([-1.570796325, 1.570796325],
+ dtype=np.float32)) \
+ .write_attr('long_name', 'Grid Latitude') \
+ .write_data(gclath_sat)
+
+ # Datetime
+ obsspace.create_var('MetaData/dateTime', dtype=np.int64,
+ fillval=timestamp_sat.fill_value) \
+ .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \
+ .write_attr('long_name', 'Datetime') \
+ .write_data(timestamp_sat)
+
+ # Station Identification
+ obsspace.create_var('MetaData/stationIdentification', dtype=stid_sat.dtype,
+ fillval=stid_sat.fill_value) \
+ .write_attr('long_name', 'Station Identification') \
+ .write_data(stid_sat)
+
+ # Satellite Identifier
+ obsspace.create_var('MetaData/satelliteIdentifier', dtype=said_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Identifier') \
+ .write_data(said_sat)
+
+ # Satellite Instrument
+ obsspace.create_var('MetaData/satelliteInstrument', dtype=siid_sat.dtype,
+ fillval=siid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Instrument') \
+ .write_data(siid_sat)
+
+ # Satellite Constellation RO
+ obsspace.create_var('MetaData/satelliteConstellationRO', dtype=sclf_sat.dtype,
+ fillval=sclf_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Constellation RO') \
+ .write_data(sclf_sat)
+
+ # Satellite Transmitter ID
+ obsspace.create_var('MetaData/satelliteTransmitterId', dtype=ptid_sat.dtype,
+ fillval=ptid_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Transmitter Id') \
+ .write_data(ptid_sat)
+
+ # Earth Radius Curvature
+ obsspace.create_var('MetaData/earthRadiusCurvature', dtype=elrc_sat.dtype,
+ fillval=elrc_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Earth Radius of Curvature') \
+ .write_data(elrc_sat)
+
+ # Sequence Number
+ obsspace.create_var('MetaData/sequenceNumber', dtype=seqnum2_sat.dtype,
+ fillval=said_sat.fill_value) \
+ .write_attr('long_name', 'Sequence Number') \
+ .write_data(seqnum2_sat)
+
+ # Geoid Undulation
+ obsspace.create_var('MetaData/geoidUndulation', dtype=geodu_sat.dtype,
+ fillval=geodu_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Geoid Undulation') \
+ .write_data(geodu_sat)
+
+ # Height
+ obsspace.create_var('MetaData/height', dtype=heit_sat.dtype,
+ fillval=heit_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Height for Atm Refractivity') \
+ .write_data(heit_sat)
+
+ # Impact Parameter RO
+ obsspace.create_var('MetaData/impactParameterRO', dtype=impp1_sat.dtype,
+ fillval=impp1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Parameter Bending Angle') \
+ .write_data(impp1_sat)
+
+ # Impact Height RO
+ obsspace.create_var('MetaData/impactHeightRO', dtype=imph1_sat.dtype,
+ fillval=imph1_sat.fill_value) \
+ .write_attr('units', 'm') \
+ .write_attr('long_name', 'Impact Height Bending Angle') \
+ .write_data(imph1_sat)
+
+        # Frequency
+ obsspace.create_var('MetaData/frequency', dtype=mefr1_sat.dtype,
+ fillval=mefr1_sat.fill_value) \
+ .write_attr('units', 'Hz') \
+ .write_attr('long_name', 'Frequency') \
+ .write_data(mefr1_sat)
+
+ # PCCF Percent Confidence
+ obsspace.create_var('MetaData/pccf', dtype=pccf_sat.dtype,
+ fillval=pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Profile Percent Confidence') \
+ .write_data(pccf_sat)
+
+ # PCCF Ref Percent Confidence
+ obsspace.create_var('MetaData/percentConfidence', dtype=ref_pccf_sat.dtype,
+ fillval=ref_pccf_sat.fill_value) \
+ .write_attr('units', '%') \
+ .write_attr('long_name', 'Ref Percent Confidence') \
+ .write_data(ref_pccf_sat)
+
+ # Azimuth Angle
+ obsspace.create_var('MetaData/sensorAzimuthAngle', dtype=bearaz_sat.dtype,
+ fillval=bearaz_sat.fill_value) \
+ .write_attr('units', 'degree') \
+            .write_attr('long_name', 'Sensor Azimuth Angle') \
+ .write_data(bearaz_sat)
+
+ # Data Provider
+ obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce_sat.dtype,
+ fillval=ogce_sat.fill_value) \
+ .write_attr('long_name', 'Identification of Originating Center') \
+ .write_data(ogce_sat)
+
+ # Quality: Quality Flags
+ obsspace.create_var('MetaData/qfro', dtype=qfro_sat.dtype,
+ fillval=qfro_sat.fill_value) \
+ .write_attr('long_name', 'QFRO') \
+ .write_data(qfro_sat)
+
+ obsspace.create_var('MetaData/qualityFlags', dtype=qfro2_sat.dtype,
+ fillval=qfro2_sat.fill_value) \
+ .write_attr('long_name', 'Quality Flags for QFRO bit5 and bit6') \
+ .write_data(qfro2_sat)
+
+ # Quality: Satellite Ascending Flag
+ obsspace.create_var('MetaData/satelliteAscendingFlag', dtype=satasc_sat.dtype,
+ fillval=satasc_sat.fill_value) \
+ .write_attr('long_name', 'Satellite Ascending Flag') \
+ .write_data(satasc_sat)
+
+ # ObsValue: Bending Angle
+ obsspace.create_var('ObsValue/bendingAngle', dtype=bnda1_sat.dtype,
+ fillval=bnda1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle') \
+ .write_data(bnda1_sat)
+
+ # ObsValue: Atmospheric Refractivity
+ obsspace.create_var('ObsValue/atmosphericRefractivity', dtype=arfr_sat.dtype,
+ fillval=arfr_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity') \
+ .write_data(arfr_sat)
+
+ # ObsError: Bending Angle
+ obsspace.create_var('ObsError/bendingAngle', dtype=bndaoe1_sat.dtype,
+ fillval=bndaoe1_sat.fill_value) \
+ .write_attr('units', 'radians') \
+ .write_attr('long_name', 'Bending Angle Obs Error') \
+ .write_data(bndaoe1_sat)
+
+ # ObsError: Atmospheric Refractivity
+ obsspace.create_var('ObsError/atmosphericRefractivity', dtype=arfroe_sat.dtype,
+ fillval=arfroe_sat.fill_value) \
+ .write_attr('units', 'N-units') \
+ .write_attr('long_name', 'Atmospheric Refractivity Obs Error') \
+ .write_data(arfroe_sat)
+
+ # ObsType: Bending Angle
+ obsspace.create_var('ObsType/bendingAngle', dtype=bndaot_sat.dtype,
+ fillval=bndaot_sat.fill_value) \
+ .write_attr('long_name', 'Bending Angle ObsType') \
+ .write_data(bndaot_sat)
+
+ # ObsType: Atmospheric Refractivity
+ obsspace.create_var('ObsType/atmosphericRefractivity', dtype=arfrot_sat.dtype,
+ fillval=arfrot_sat.fill_value) \
+ .write_attr('long_name', 'Atmospheric Refractivity ObsType') \
+ .write_data(arfrot_sat)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Running time for splitting and output IODA for gnssro bufr: \
+ {running_time} seconds")
+
+ logger.debug("All Done!")
+
+
+if __name__ == '__main__':
+
+ start_time = time.time()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config', type=str,
+ help='Input JSON configuration', required=True)
+ parser.add_argument('-v', '--verbose',
+ help='print debug logging information',
+ action='store_true')
+ args = parser.parse_args()
+
+ log_level = 'DEBUG' if args.verbose else 'INFO'
+    logger = Logger('bufr2ioda_gnssro_tsx.py', level=log_level,
+ colored_log=True)
+
+ with open(args.config, "r") as json_file:
+ config = json.load(json_file)
+
+ bufr_to_ioda(config, logger)
+
+ end_time = time.time()
+ running_time = end_time - start_time
+ logger.debug(f"Total running time: {running_time} seconds")
diff --git a/ush/soca/soca_vrfy.py b/ush/soca/soca_vrfy.py
deleted file mode 100755
index 6ad0ec27e..000000000
--- a/ush/soca/soca_vrfy.py
+++ /dev/null
@@ -1,314 +0,0 @@
-#!/usr/bin/env python3
-
-# make plots for marine analysis
-
-import matplotlib.pyplot as plt
-import xarray as xr
-import cartopy
-import cartopy.crs as ccrs
-import numpy as np
-import os
-
-
-projs = {'North': ccrs.NorthPolarStereo(),
- 'South': ccrs.SouthPolarStereo(),
- 'Global': ccrs.Mollweide(central_longitude=-150)}
-
-
-def plotConfig(grid_file=[],
- data_file=[],
- layer_file=[],
- variable=[],
- PDY=os.getenv('PDY'),
- cyc=os.getenv('cyc'),
- exp=os.getenv('PSLOT'),
- levels=[],
- bounds=[],
- colormap=[],
- max_depth=np.nan,
- max_depths=[700.0, 5000.0],
- comout=[],
- variables_horiz={},
- variables_zonal={},
- variables_meridional={},
- lat=np.nan,
- lats=np.arange(-60, 60, 10),
- lon=np.nan,
- lons=np.arange(-280, 80, 30),
- proj='set me',
- projs=['Global']):
-
- # Map variable names to their units
- variable_units = {
- 'ave_ssh': 'meter',
- 'Temp': 'deg C',
- 'Salt': 'psu',
- 'aice_h': 'unitless',
- 'hi_h': 'meter',
- 'hs_h': 'meter',
- 'u': 'm/s',
- 'v': 'm/s'
- }
-
- """
- Prepares the configuration for the plotting functions below
- """
- config = {}
- config['comout'] = comout # output directory
- config['grid file'] = grid_file
- config['fields file'] = data_file
- config['layer file'] = layer_file
- config['PDY'] = PDY
- config['cyc'] = cyc
- config['exp'] = exp
- config['levels'] = [1]
- config['colormap'] = colormap
- config['bounds'] = bounds
- config['lats'] = lats # all the lats to plot
- config['lat'] = lat # the lat being currently plotted
- config['lons'] = lons # all the lons to plot
- config['lon'] = lon # the lon being currently plotted
- config['max depths'] = max_depths # all the max depths to plot
- config['max depth'] = max_depth # the max depth currently plotted
- config['horiz variables'] = variables_horiz # all the vars for horiz plots
- config['zonal variables'] = variables_zonal # all the vars for zonal plots
- config['meridional variables'] = variables_meridional # all the vars for meridional plots
- config['variable'] = variable # the variable currently plotted
- config['projs'] = projs # all the projections etc.
- config['proj'] = proj
-
- # Add units to the config for each variable
- config['variable_units'] = variable_units
- return config
-
-
-def plotHorizontalSlice(config):
- """
- Contourf of a horizontal slice of an ocean field
- """
- grid = xr.open_dataset(config['grid file'])
- data = xr.open_dataset(config['fields file'])
-
- dirname = os.path.join(config['comout'], config['variable'])
- os.makedirs(dirname, exist_ok=True)
-
- variable = config['variable']
- unit = config['variable_units'].get(config['variable'], 'unknown')
- exp = config['exp']
- PDY = config['PDY']
- cyc = config['cyc']
-
- if variable in ['Temp', 'Salt', 'u', 'v']:
- level = config['levels'][0]
- slice_data = np.squeeze(data[variable])[level, :, :]
- label_colorbar = f"{variable} ({unit}) Level {level}"
- figname = os.path.join(dirname, variable + '_Level_' + str(level))
- title = f"{exp} {PDY} {cyc} {variable} Level {level}"
- else:
- slice_data = np.squeeze(data[variable])
- label_colorbar = f"{variable} ({unit})"
- figname = os.path.join(dirname, variable + '_' + config['proj'])
- title = f"{exp} {PDY} {cyc} {variable}"
-
- bounds = config['horiz variables'][variable]
- slice_data = np.clip(slice_data, bounds[0], bounds[1])
-
- fig, ax = plt.subplots(figsize=(8, 5), subplot_kw={'projection': projs[config['proj']]})
-
- # Use pcolor to plot the data
- pcolor_plot = ax.pcolormesh(np.squeeze(grid.lon),
- np.squeeze(grid.lat),
- slice_data,
- vmin=bounds[0], vmax=bounds[1],
- transform=ccrs.PlateCarree(),
- cmap=config['colormap'],
- zorder=0)
-
- # Add colorbar for filled contours
- cbar = fig.colorbar(pcolor_plot, ax=ax, shrink=0.75, orientation='horizontal')
- cbar.set_label(label_colorbar)
-
- # Add contour lines with specified linewidths
- contour_levels = np.linspace(bounds[0], bounds[1], 5)
- ax.contour(np.squeeze(grid.lon),
- np.squeeze(grid.lat),
- slice_data,
- levels=contour_levels,
- colors='black',
- linewidths=0.1,
- transform=ccrs.PlateCarree(),
- zorder=2)
-
- try:
- ax.coastlines() # TODO: make this work on hpc
- except Exception as e:
- print(f"Warning: could not add coastlines. {e}")
- ax.set_title(title)
- if config['proj'] == 'South':
- ax.set_extent([-180, 180, -90, -50], ccrs.PlateCarree())
- if config['proj'] == 'North':
- ax.set_extent([-180, 180, 50, 90], ccrs.PlateCarree())
- # ax.add_feature(cartopy.feature.LAND) # TODO: make this work on hpc
- plt.savefig(figname, bbox_inches='tight', dpi=300)
- plt.close(fig)
-
-
-def plotZonalSlice(config):
- """
- Contourf of a zonal slice of an ocean field
- """
- variable = config['variable']
- unit = config['variable_units'].get(config['variable'], 'unknown')
- exp = config['exp']
- PDY = config['PDY']
- cyc = config['cyc']
- lat = float(config['lat'])
- grid = xr.open_dataset(config['grid file'])
- data = xr.open_dataset(config['fields file'])
- layer = xr.open_dataset(config['layer file'])
- lat_index = np.argmin(np.array(np.abs(np.squeeze(grid.lat)[:, 0] - lat)))
- slice_data = np.squeeze(np.array(data[variable]))[:, lat_index, :]
- depth = np.squeeze(np.array(layer['h']))[:, lat_index, :]
- depth[np.where(np.abs(depth) > 10000.0)] = 0.0
- depth = np.cumsum(depth, axis=0)
- bounds = config['zonal variables'][variable]
- slice_data = np.clip(slice_data, bounds[0], bounds[1])
- x = np.tile(np.squeeze(grid.lon[:, lat_index]), (np.shape(depth)[0], 1))
-
- fig, ax = plt.subplots(figsize=(8, 5))
-
- # Plot the filled contours
- contourf_plot = ax.contourf(x, -depth, slice_data,
- levels=np.linspace(bounds[0], bounds[1], 100),
- vmin=bounds[0], vmax=bounds[1],
- cmap=config['colormap'])
-
- # Add contour lines with specified linewidths
- contour_levels = np.linspace(bounds[0], bounds[1], 5)
- ax.contour(x, -depth, slice_data,
- levels=contour_levels,
- colors='black',
- linewidths=0.1)
-
- # Add colorbar for filled contours
- cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.5, orientation='horizontal')
- cbar.set_label(f"{config['variable']} ({unit}) Lat {lat}")
-
- # Set the colorbar ticks
- cbar.set_ticks(contour_levels)
- contourf_plot.set_clim(bounds[0], bounds[1])
-
- ax.set_ylim(-config['max depth'], 0)
- title = f"{exp} {PDY} {cyc} {variable} lat {int(lat)}"
- ax.set_title(title)
- dirname = os.path.join(config['comout'], config['variable'])
- os.makedirs(dirname, exist_ok=True)
- figname = os.path.join(dirname, config['variable'] +
- 'zonal_lat_' + str(int(lat)) + '_' + str(int(config['max depth'])) + 'm')
- plt.savefig(figname, bbox_inches='tight', dpi=300)
- plt.close(fig)
-
-
-def plotMeridionalSlice(config):
- """
- Contourf of a Meridional slice of an ocean field
- """
- variable = config['variable']
- unit = config['variable_units'].get(config['variable'], 'unknown')
- exp = config['exp']
- PDY = config['PDY']
- cyc = config['cyc']
- lon = float(config['lon'])
- grid = xr.open_dataset(config['grid file'])
- data = xr.open_dataset(config['fields file'])
- layer = xr.open_dataset(config['layer file'])
- lon_index = np.argmin(np.array(np.abs(np.squeeze(grid.lon)[0, :] - lon)))
- slice_data = np.squeeze(np.array(data[config['variable']]))[:, :, lon_index]
- depth = np.squeeze(np.array(layer['h']))[:, :, lon_index]
- depth[np.where(np.abs(depth) > 10000.0)] = 0.0
- depth = np.cumsum(depth, axis=0)
- bounds = config['meridional variables'][variable]
- slice_data = np.clip(slice_data, bounds[0], bounds[1])
- y = np.tile(np.squeeze(grid.lat)[:, lon_index], (np.shape(depth)[0], 1))
-
- fig, ax = plt.subplots(figsize=(8, 5))
-
- # Plot the filled contours
- contourf_plot = ax.contourf(y, -depth, slice_data,
- levels=np.linspace(bounds[0], bounds[1], 100),
- vmin=bounds[0], vmax=bounds[1],
- cmap=config['colormap'])
-
- # Add contour lines with specified linewidths
- contour_levels = np.linspace(bounds[0], bounds[1], 5)
- ax.contour(y, -depth, slice_data,
- levels=contour_levels,
- colors='black',
- linewidths=0.1)
-
- # Add colorbar for filled contours
- cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.5, orientation='horizontal')
- cbar.set_label(f"{config['variable']} ({unit}) Lon {lon}")
-
- # Set the colorbar ticks
- cbar.set_ticks(contour_levels)
- contourf_plot.set_clim(bounds[0], bounds[1])
-
- ax.set_ylim(-config['max depth'], 0)
- title = f"{exp} {PDY} {cyc} {variable} lon {int(lon)}"
- ax.set_title(title)
- dirname = os.path.join(config['comout'], config['variable'])
- os.makedirs(dirname, exist_ok=True)
- figname = os.path.join(dirname, config['variable'] +
- 'meridional_lon_' + str(int(lon)) + '_' + str(int(config['max depth'])) + 'm')
- plt.savefig(figname, bbox_inches='tight', dpi=300)
- plt.close(fig)
-
-
-class statePlotter:
-
- def __init__(self, config_dict):
- self.config = config_dict
-
- def plot(self):
- # Loop over variables, slices (horiz and vertical) and projections ... and whatever else is needed
-
- #######################################
- # zonal slices
-
- for lat in self.config['lats']:
- self.config['lat'] = lat
-
- for max_depth in self.config['max depths']:
- self.config['max depth'] = max_depth
-
- variableBounds = self.config['zonal variables']
- for variable in variableBounds.keys():
- bounds = variableBounds[variable]
- self.config.update({'variable': variable, 'bounds': bounds})
- plotZonalSlice(self.config)
-
- #######################################
- # Meridional slices
-
- for lon in self.config['lons']:
- self.config['lon'] = lon
-
- for max_depth in self.config['max depths']:
- self.config['max depth'] = max_depth
-
- variableBounds = self.config['meridional variables']
- for variable in variableBounds.keys():
- bounds = variableBounds[variable]
- self.config.update({'variable': variable, 'bounds': bounds})
- plotMeridionalSlice(self.config)
-
- #######################################
- # Horizontal slices
- for proj in self.config['projs']:
-
- variableBounds = self.config['horiz variables']
- for variable in variableBounds.keys():
- bounds = variableBounds[variable]
- self.config.update({'variable': variable, 'bounds': bounds, 'proj': proj})
- plotHorizontalSlice(self.config)
diff --git a/utils/soca/fig_gallery/README.md b/utils/soca/fig_gallery/README.md
deleted file mode 100644
index d021a7d81..000000000
--- a/utils/soca/fig_gallery/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-## How to generate the EVA and State space figures
-
-#### Create a scratch place to run `run_vrfy.py`. This script will generate a bunch of sbatch scripts and logs.
-```
-mkdir /somewhere/scratch
-cd /somewhere/scratch
-ln -s /path/to/run_vrfy.py . # to be sorted out properly in the future
-cp /path/to/vrfy_config.yaml .
-module use ...
-module load EVA/....
-```
----
-#### Edit `vrfy_config.yaml`
-It's actually read as a jinja template to render `pslot` if necessary. Anything that is a templated variable in `vrfy_jobcard.sh.j2` can be added to the yaml below.
-```yaml
-pslot: "nomlb"
-start_pdy: '20210701'
-end_pdy: '20210701'
-cycs: ["00", "06", "12", "18"]
-run: "gdas"
-homegdas: "/work2/noaa/da/gvernier/runs/mlb/GDASApp"
-base_exp_path: "/work2/noaa/da/gvernier/runs/mlb/{{ pslot }}/COMROOT/{{ pslot }}"
-plot_ensemble_b: "OFF"
-plot_parametric_b: "OFF"
-plot_background: "OFF"
-plot_increment: "ON"
-plot_analysis: "OFF"
-eva_plots: "ON"
-qos: "batch"
-hpc: "hercules"
-eva_module: "EVA/orion"
-```
-
----
-#### Run the application
-```python run_vrfy.py vrfy_config.yaml```
-This will generate and submit the job cards for all the **cycles** defined by `cycs`, from `start_pdy` to `end_pdy`.
-
diff --git a/utils/soca/fig_gallery/gdassoca_obsstats.py b/utils/soca/fig_gallery/gdassoca_obsstats.py
deleted file mode 100644
index 00c60b971..000000000
--- a/utils/soca/fig_gallery/gdassoca_obsstats.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python3
-
-
-# creates figures of timeseries from the csv outputs computed by gdassoca_obsstats.x
-import argparse
-from itertools import product
-import os
-import glob
-import pandas as pd
-from jinja2 import Template
-import matplotlib.pyplot as plt
-import matplotlib.dates as mdates
-
-colors = [
- "lightsteelblue",
- "lightgreen",
- "peachpuff",
- "lightpink",
- "lightgoldenrodyellow",
- "paleturquoise",
- "lightcoral",
- "palegreen",
- "palegoldenrod",
- "mistyrose",
- "lavender"
- "lightsalmon",
-]
-
-def get_inst(csv_file_name):
- """Extract the instrument name from the csv file name. gdas.t00z.ocn.sst_ahi_h08_l3c.stats.csv -> sst_ahi_h08_l3c"""
- return csv_file_name.split('.')[-3]
-
-class ObsStats:
- def __init__(self):
- self.data = pd.DataFrame()
-
- def read_csv(self, filepaths):
- # Iterate through the list of file paths and append their data
- for filepath in filepaths:
- new_data = pd.read_csv(filepath)
-
- # Convert date to datetime for easier plotting
- new_data['date'] = pd.to_datetime(new_data['date'], format='%Y%m%d%H')
- self.data = pd.concat([self.data, new_data], ignore_index=True)
- self.data.sort_values('date', inplace=True)
-
- def plot_timeseries(self, ocean, variable, inst="", dirout=""):
-
- # Filter data for the given ocean and variable
- filtered_data = self.data[(self.data['Ocean'] == ocean) & (self.data['Variable'] == variable)]
- if filtered_data.empty:
- print("No data available for the given ocean and variable combination.")
- return []
-
- # Get unique experiments
- experiments = filtered_data['Exp'].unique()
- experiments.sort()
- print(experiments)
-
- # Plot settings
- fig, axs = plt.subplots(3, 1, figsize=(10, 15), sharex=True)
- fig.suptitle(f'{inst} {variable} statistics, {ocean} ocean', fontsize=18, fontweight='bold')
-
-
- exp_counter = 0
- for exp in experiments:
- exp_data = self.data[(self.data['Ocean'] == ocean) &
- (self.data['Variable'] == variable) &
- (self.data['Exp'] == exp)]
-
- # Plot RMSE
- axs[0].plot(exp_data['date'], exp_data['RMSE'], marker='o', linestyle='-', color=colors[exp_counter], label=exp)
- axs[0].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H'))
- axs[0].xaxis.set_major_locator(mdates.DayLocator())
- axs[0].tick_params(labelbottom=False)
- axs[0].set_ylabel('RMSE', fontsize=18, fontweight='bold')
- axs[0].legend()
- axs[0].grid(True)
-
- # Plot Bias
- axs[1].plot(exp_data['date'], exp_data['Bias'], marker='o', linestyle='-', color=colors[exp_counter], label=exp)
- axs[1].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H'))
- axs[1].xaxis.set_major_locator(mdates.DayLocator())
- axs[1].tick_params(labelbottom=False)
- axs[1].set_ylabel('Bias', fontsize=18, fontweight='bold')
- axs[1].grid(True)
-
- # Plot Count
- axs[2].plot(exp_data['date'], exp_data['Count'], marker='o', linestyle='-', color=colors[exp_counter], label=exp)
- axs[2].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H'))
- axs[2].xaxis.set_major_locator(mdates.DayLocator())
- axs[2].set_ylabel('Count', fontsize=18, fontweight='bold')
- axs[2].grid(True)
-
- exp_counter += 1
-
- # Improve layout and show plot
- plt.tight_layout(rect=[0, 0.03, 1, 0.95])
- plt.savefig(f'{dirout}/{inst}_{variable}_{ocean}.png')
- # close the figure
- plt.close(fig)
-
- return experiments
-
-if __name__ == "__main__":
- epilog = ["Usage examples: ./gdassoca_obsstats.py --exps cp1/COMROOT/cp1 cp2/COMROOT/cp2 --inst sst_abi_g16_l3c --dirout cp1vscp2"]
- parser = argparse.ArgumentParser(description="Observation space RMSE's and BIAS's",
- formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog=os.linesep.join(epilog))
- parser.add_argument("--exps", nargs='+', required=True,
- help="Path to the experiment's COMROOT")
- parser.add_argument("--inst", required=True, help="The name of the instrument/platform (ex: sst_abi_g16_l3c) or a wild card (eg sst*)")
- parser.add_argument("--dirout", required=True, help="Output directory")
- args = parser.parse_args()
-
- insts = []
- inst = args.inst
- os.makedirs(args.dirout, exist_ok=True)
-
- # Get all instruments/obs spaces
- for exp in args.exps:
- wc = exp + f'/*.*/??/analysis/ocean/*{inst}*.stats.csv'
- flist = glob.glob(wc)
- for fname in flist:
- insts.append(get_inst(fname))
- insts = list(set(insts))
- insts.sort()
- print(insts)
-
- experiments = []
- for inst in insts:
- print(f"Processing {inst}")
- flist = []
- for exp in args.exps:
- wc = exp + f'/*.*/??/analysis/ocean/*{inst}*.stats.csv'
- flist.append(glob.glob(wc))
-
- flist = sum(flist, [])
- obsStats = ObsStats()
- obsStats.read_csv(flist)
- for var, ocean in product(['ombg_noqc', 'ombg_qc'],
- ['Global', 'Atlantic', 'Pacific', 'Indian', 'Arctic', 'Southern']):
- experiments.extend(obsStats.plot_timeseries(ocean, var, inst=inst, dirout=args.dirout))
-
- # Select unique elements of experiments
- experiments = list(set(experiments))
- experiments.sort()
-
- # Create the html document from the jinja2 template
- template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'gdassoca_obsstats_template.html')
- template = Template(open(template_path).read())
- context = {'insts': insts, 'experiments': experiments}
- indexhtml = template.render(context)
-
- # Write the rendered HTML to a file
- with open(f'{args.dirout}/index.html', 'w') as f:
- f.write(indexhtml)
diff --git a/utils/soca/fig_gallery/gdassoca_obsstats_template.html b/utils/soca/fig_gallery/gdassoca_obsstats_template.html
deleted file mode 100644
index ba717da0d..000000000
--- a/utils/soca/fig_gallery/gdassoca_obsstats_template.html
+++ /dev/null
@@ -1,120 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- Time Series of (Observation - Background) Statistics
-
-
- Experiments: {{ experiments|join(', ') }}
-
-
-
-
- Global
- Arctic
- Atlantic
- Indian
- Pacific
- Southern
-
-
-
-
- {% for inst in insts %}
- {{ inst }}
- {% endfor %}
-
-
-
-
-
-
-
-
diff --git a/utils/soca/fig_gallery/marine_vrfy_display/README b/utils/soca/fig_gallery/marine_vrfy_display/README
deleted file mode 100644
index dde23647d..000000000
--- a/utils/soca/fig_gallery/marine_vrfy_display/README
+++ /dev/null
@@ -1,29 +0,0 @@
-################################################
-# README for index_vrfy_marine.html
-################################################
-
-To use, follow these steps:
-
-----------------------------
-
-1. Login to Hera via X2Go and type on the command line:
-
- firefox index_vrfy_marine.html
-
-2. In the webpage,
- a. Enter the path to your main verification directory. See 2b for info on what to include in this path.
- b. Are your figures in your COMROOT or from HPSS?
- If COMROOT, the main path should include everything BEFORE /gdas.YearMonthDay subdirectories.
- If HPSS, the main path should include everything BEFORE /YearMonthDayHour subdirectories.
- Enter COMROOT or HPSS.
- c. Enter the date and cycle time you wish to display.
-
-3. Refresh the webpage tab by clicking the refresh button. You should still see your entered path and date/time.
-
-4. Navigate the dropdown menu on the left and click to display figures.
-
-----------------------------
-NOTES:
- N1: If you wish to look at figures from a different experiment or for a different date/time, repeat Steps 2-4.
-
-################################################
diff --git a/utils/soca/fig_gallery/marine_vrfy_display/default.css b/utils/soca/fig_gallery/marine_vrfy_display/default.css
deleted file mode 100644
index 4ff19b726..000000000
--- a/utils/soca/fig_gallery/marine_vrfy_display/default.css
+++ /dev/null
@@ -1,690 +0,0 @@
- html, body
- {
- height: 100%;
- }
-
- body
- {
- margin: 0px;
- padding: 0px;
- background: #202020;
- font-family: 'Source Sans Pro', sans-serif;
- font-size: 12pt;
- font-weight: 300;
- color: #444444;
- }
-
-
- h1, h2, h3
- {
- margin: 0;
- padding: 0;
- font-weight: 600;
- color: #333333;
- }
-
- p, ol, ul
- {
- margin-top: 0;
- }
-
- ol, ul
- {
- padding: 0;
- list-style: none;
- }
-
- p
- {
- line-height: 180%;
- }
-
- strong
- {
- }
-
- a
- {
- color: #2980b9;
- }
-
- a:hover
- {
- text-decoration: none;
- }
-
- .container
- {
- overflow: hidden;
- margin: 0em auto;
- width: 1350px;
- }
-
-/*********************************************************************************/
-/* Image Style */
-/*********************************************************************************/
-
- .image
- {
- display: inline-block;
- }
-
- .image img
- {
- display: block;
- width: 100%;
- }
-
- .image-full
- {
- display: block;
- width: 100%;
- margin: 0 0 2em 0;
- }
-
- .image-left
- {
- float: left;
- margin: 0 2em 2em 0;
- }
-
- .image-centered
- {
- display: block;
- margin: 0 0 2em 0;
- }
-
- .image-centered img
- {
- margin: 0 auto;
- width: auto;
- }
-
-/*********************************************************************************/
-/* List Styles */
-/*********************************************************************************/
-
- ul.style1
- {
- margin: 0 auto;
- padding: 0;
- width: 80%;
- overflow: hidden;
- list-style: none;
- text-align: left;
- color: #6c6c6c
- }
-
- ul.style1 li
- {
- padding: 1.6em 0em 0em 0em;
- margin: 0 0 2.5em 0;
- border-top: 1px solid rgba(0,0,0,.1);
- }
-
- ul.style1 li:first-child
- {
- border-top: none;
- }
-
- ul.style1 p
- {
- margin: 0;
- }
-
- ul.style1 a
- {
- display: block;
- text-decoration: none;
- color: #2D2D2D;
- }
-
- ul.style1 a:hover
- {
- text-decoration: underline;
- }
-
- ul.style1 h3
- {
- padding: 1em 0em 5px 0em;
- text-transform: uppercase;
- font-size: 1em;
- font-weight: 400;
- }
-
- ul.style1 .first
- {
- padding-top: 0;
- background: none;
- }
-
- ul.style1 .date
- {
- float: left;
- position: relative;
- width: 80px;
- height: 70px;
- margin: 0.5em 1.5em 0em 0.5em;
- padding: 1.5em 0em 1.5em 0em;
- box-shadow: 0px 0px 0px 5px rgba(255,255,255,0.1);
- line-height: normal;
- text-align: center;
- text-transform: uppercase;
- text-shadow: 0px 1px 0px rgba(0,0,0,.2);
- font-size: 1em;
- font-weight: 400;
- border-right: 1px solid rgba(0,0,0,.1);
- }
-
- ul.style1 .date:after
- {
- content: '';
- display: block;
- position: absolute;
- left: 0;
- top: 0;
- width: 100%;
- height: 100%;
- border-radius: 6px;
- }
-
- ul.style1 .date b
- {
- margin: 0;
- padding: 0;
- display: block;
- margin-top: -5px;
- font-size: 1.8em;
- font-weight: 700;
- }
-
- ul.style1 .date a
- {
- }
-
- ul.style1 .date a:hover
- {
- text-decoration: none;
- }
-
-
-/*********************************************************************************/
-/* Social Icon Styles */
-/*********************************************************************************/
-
- ul.contact
- {
- margin: 0;
- padding: 2em 0em 0em 0em;
- list-style: none;
- }
-
- ul.contact li
- {
- display: inline-block;
- padding: 0em 0.30em;
- font-size: 1em;
- }
-
- ul.contact li span
- {
- display: none;
- margin: 0;
- padding: 0;
- }
-
- ul.contact li a
- {
- color: #FFF;
- }
-
- ul.contact li a:before
- {
- display: inline-block;
- background: #3f3f3f;
- width: 40px;
- height: 40px;
- line-height: 40px;
- border-radius: 20px;
- text-align: center;
- color: #FFFFFF;
- }
-
- ul.contact li a.icon-twitter:before { background: #2DAAE4; }
- ul.contact li a.icon-facebook:before { background: #39599F; }
- ul.contact li a.icon-dribbble:before { background: #C4376B; }
- ul.contact li a.icon-tumblr:before { background: #31516A; }
- ul.contact li a.icon-rss:before { background: #F2600B; }
-
-/*********************************************************************************/
-/* Button Style */
-/*********************************************************************************/
-
- .button
- {
- display: inline-block;
- padding: 1em 3em 1em 2em;
- letter-spacing: 0.20em;
- text-decoration: none;
- text-transform: uppercase;
- font-weight: 400;
- font-size: 0.90em;
- color: #FFF;
- }
-
- .button:before
- {
- display: inline-block;
- background: #FFC31F;
- margin-right: 1em;
- width: 40px;
- height: 40px;
- line-height: 40px;
- border-radius: 20px;
- text-align: center;
- color: #272925;
- }
-
-/*********************************************************************************/
-/* 4-column */
-/*********************************************************************************/
-
- .box1,
- .box2,
- .box3,
- .box4
- {
- width: 235px;
- }
-
- .box1,
- .box2,
- .box3,
- {
- float: left;
- margin-right: 20px;
- }
-
- .box4
- {
- float: right;
- }
-
-/*********************************************************************************/
-/* 3-column */
-/*********************************************************************************/
-
- .boxA,
- .boxB,
- .boxC
- {
- width: 320px;
- }
-
- .boxA,
- .boxB,
- {
- float: left;
- margin-right: 20px;
- }
-
- .boxC
- {
- float: right;
- }
-
-/*********************************************************************************/
-/* 2-column */
-/*********************************************************************************/
-
- .tbox1,
- .tbox2
- {
- width: 575px;
- }
-
- .tbox1
- {
- float: left;
- }
-
- .tbox2
- {
- float: right;
- }
-
-/*********************************************************************************/
-/* Heading Titles */
-/*********************************************************************************/
-
- .title
- {
- margin-bottom: 3em;
- }
-
- .title h2
- {
- text-transform: lowercase;
- font-size: 2.8em;
- }
-
- .title .byline
- {
- font-size: 1.3em;
- color: #6F6F6F;
- }
-
-/*********************************************************************************/
-/* Header */
-/*********************************************************************************/
-
- #header
- {
- position: relative;
- float: left;
- width: 225px;
- padding: 3em 0em;
- }
-
-/*********************************************************************************/
-/* Logo */
-/*********************************************************************************/
-
- #logo
- {
- text-align: center;
- margin-bottom: 4em;
- }
-
- #logo h1
- {
- display: block;
- }
-
- #logo a
- {
- text-decoration: none;
- color: #FFF;
- }
-
- #logo span
- {
- padding-right: 0.5em;
- text-transform: uppercase;
- font-size: 0.90em;
- color: rgba(255,255,255,0.3);
- }
-
- #logo span a
- {
- color: rgba(255,255,255,0.5);
- }
-
- #logo img
- {
- display: inline-block;
- margin-bottom: 1em;
- border-radius: 50%;
- }
-
-/*********************************************************************************/
-/* Menu */
-/*********************************************************************************/
-
- #menu
- {
- }
-
- #menu ul
- {
- }
-
- #menu li
- {
- border-top: 1px solid rgba(255,255,255,0.4);
- }
-
- #menu li a, #menu li span
- {
- display: block;
- padding: 1em 1em;
- text-align: center;
- text-decoration: none;
- text-transform: uppercase;
- font-weight: 700;
- color: rgba(255,255,255,1);
- }
-
- #menu ul li a:hover {
- background-color: #2980b9;
- border-left: solid 10px #ffffff;
- }
-
- #menu li:hover a, #menu li.active a, #menu li.active span
- {
- }
-
- #menu .current_page_item a
- {
- background: #2980b9;
- color: rgba(255,255,255,1);
- }
-
- #menu .icon
- {
- }
-
-/*********************************************************************************/
-/* Dropdown */
-/*********************************************************************************/
- .dropbtn {
- position: relative;
- display: block;
- padding: 1em 1em;
- width: 100%;
- text-align: center;
- text-decoration: none;
- text-transform: uppercase;
- font-family: 'Source Sans Pro', sans-serif;
- font-size: 12pt;
- font-weight: 700;
- color: rgba(255,255,255,1);
- background-color: #112e51;
- border-top: 1px solid rgba(255,255,255,0.4);
- border-left: none;
- border-bottom: none;
- border-right: none;
- cursor: pointer;
- }
-
- #active {
- background-color: #2980b9;
- color: rgba(255,255,255,1);
- }
-
- .dropbtn:focus {
- background-color: #2980b9;
- border-left: solid 10px #ffffff;
- display: block;
- }
-
- .dropdown {
- position: relative;
- display: inline-block;
- width: 100%;
- }
-
- /* NESTED DROPDOWNS */
- /* The dropdown content (submenus) */
- .dropdown-content {
- display: none;
- left: 100%;
- top: -50%;
- position: absolute;
- background-color: #112e51;
- min-width: 160px;
- z-index: 1;
- }
-
- .dropdown-content a {
- display: block;
- color: rgba(255,255,255,1);
- padding: 1em 1em;
- border: 1px solid rgba(255,255,255,0.4);
- text-align: center;
- text-decoration: none;
- }
-
- .dropdown-content a:hover {
- background-color: #575757;
- }
-
- /* Style for the submenu */
- .submenu {
- display: none;
- position: relative;
- left: 100%;
- top: 1;
- background-color: #575757;
- min-width: 160px;
- }
-
- /* Show submenu when the parent item is clicked */
- .dropdown-item.open .submenu {
- display: block;
- }
-
- /* Show the deepter nested dropdown when the button is clicked */
- .dropdown.open .dropdown-content {
- display: block;
- }
-
- /* END NESTED DROPDOWNS */
-
- .show {display: block;}
-
-/*********************************************************************************/
-/* Banner */
-/*********************************************************************************/
-
- #banner
- {
- margin-bottom: 0em;
- }
-
-/*********************************************************************************/
-/* Page */
-/*********************************************************************************/
-
- #page
- {
- background: #112e51;
-/* background: #2a2a2a; */
- }
-
-/*********************************************************************************/
-/* Main */
-/*********************************************************************************/
-
- #main
- {
- overflow: hidden;
- float: right;
- width: 1125px;
- padding: 1em 0px 5em 0px;
- background: #FFF;
- border-top: 6px solid #2980b9;
- text-align: center;
- }
-
-/*********************************************************************************/
-/* Featured */
-/*********************************************************************************/
-
- #featured
- {
- overflow: hidden;
- margin-bottom: 3em;
- padding-top: 5em;
- border-top: 1px solid rgba(0,0,0,0.08);
- }
-
-/*********************************************************************************/
-/* Sidebar */
-/*********************************************************************************/
-
- #sidebar
- {
- }
-
-/*********************************************************************************/
-/* Footer */
-/*********************************************************************************/
-
- #footer
- {
- overflow: hidden;
- padding: 0em 0em;
- border-top: 1px solid rgba(0,0,0,0.08);
- font-weight: bolder;
- }
-
-/*********************************************************************************/
-/* Welcome */
-/*********************************************************************************/
-
- #welcome
- {
- overflow: hidden;
- padding: 0em 1em;
- border-top: 1px solid rgba(0,0,0,0.08);
- font-weight: bolder;
- }
-
-/*********************************************************************************/
-/* Contact */
-/*********************************************************************************/
-
- #contact
- {
- overflow: hidden;
- padding: 0em 0em;
- border-top: 1px solid rgba(0,0,0,0.08);
- font-weight: bolder;
- }
-
-/*********************************************************************************/
-/* Copyright */
-/*********************************************************************************/
-
- #copyright
- {
- overflow: hidden;
- padding: 1em 0em;
- border-top: 1px solid rgba(0,0,0,0.08);
- }
-
- #copyright span
- {
- display: block;
- letter-spacing: 0.20em;
- line-height: 2.5em;
- text-align: center;
- text-transform: uppercase;
- font-size: 0.8em;
- font-weight: normal;
- color: rgba(0,0,0,0.7);
- }
-
- #copyright a
- {
- text-decoration: none;
- color: rgba(0,0,0,0.9);
- }
-
- .fa
- {
- display: block;
- color: #000;
- background: red;
- }
diff --git a/utils/soca/fig_gallery/marine_vrfy_display/fonts.css b/utils/soca/fig_gallery/marine_vrfy_display/fonts.css
deleted file mode 100644
index f90cc4360..000000000
--- a/utils/soca/fig_gallery/marine_vrfy_display/fonts.css
+++ /dev/null
@@ -1,422 +0,0 @@
-@charset 'UTF-8';
-
-@font-face{font-family:'FontAwesome';src:url('font/fontawesome-webfont.eot?v=4.0.1');src:url('font/fontawesome-webfont.eot?#iefix&v=4.0.1') format('embedded-opentype'),url('font/fontawesome-webfont.woff?v=4.0.1') format('woff'),url('font/fontawesome-webfont.ttf?v=4.0.1') format('truetype'),url('font/fontawesome-webfont.svg?v=4.0.1#fontawesomeregular') format('svg');font-weight:normal;font-style:normal}
-
-/*********************************************************************************/
-/* Icons */
-/* Powered by Font Awesome by Dave Gandy | http://fontawesome.io */
-/* Licensed under the SIL OFL 1.1 (font), MIT (CSS) */
-/*********************************************************************************/
-
- .fa
- {
- text-decoration: none;
- }
-
- .fa:before
- {
- display:inline-block;
- font-family: FontAwesome;
- font-size: 1.25em;
- text-decoration: none;
- font-style: normal;
- font-weight: normal;
- line-height: 1;
- -webkit-font-smoothing:antialiased;
- -moz-osx-font-smoothing:grayscale;
- }
-
- .fa-lg{font-size:1.3333333333333333em;line-height:.75em;vertical-align:-15%}
- .fa-2x{font-size:2em}
- .fa-3x{font-size:3em}
- .fa-4x{font-size:4em}
- .fa-5x{font-size:5em}
- .fa-fw{width:1.2857142857142858em;text-align:center}
- .fa-ul{padding-left:0;margin-left:2.142857142857143em;list-style-type:none}.fa-ul>li{position:relative}
- .fa-li{position:absolute;left:-2.142857142857143em;width:2.142857142857143em;top:.14285714285714285em;text-align:center}.fa-li.fa-lg{left:-1.8571428571428572em}
- .fa-border{padding:.2em .25em .15em;border:solid .08em #eee;border-radius:.1em}
- .pull-right{float:right}
- .pull-left{float:left}
- .fa.pull-left{margin-right:.3em}
- .fa.pull-right{margin-left:.3em}
- .fa-spin{-webkit-animation:spin 2s infinite linear;-moz-animation:spin 2s infinite linear;-o-animation:spin 2s infinite linear;animation:spin 2s infinite linear}
- @-moz-keyframes spin{0%{-moz-transform:rotate(0deg)} 100%{-moz-transform:rotate(359deg)}}@-webkit-keyframes spin{0%{-webkit-transform:rotate(0deg)} 100%{-webkit-transform:rotate(359deg)}}@-o-keyframes spin{0%{-o-transform:rotate(0deg)} 100%{-o-transform:rotate(359deg)}}@-ms-keyframes spin{0%{-ms-transform:rotate(0deg)} 100%{-ms-transform:rotate(359deg)}}@keyframes spin{0%{transform:rotate(0deg)} 100%{transform:rotate(359deg)}}.fa-rotate-90{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1);-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}
- .fa-rotate-180{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2);-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}
- .fa-rotate-270{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=3);-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}
- .fa-flip-horizontal{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1);-webkit-transform:scale(-1, 1);-moz-transform:scale(-1, 1);-ms-transform:scale(-1, 1);-o-transform:scale(-1, 1);transform:scale(-1, 1)}
- .fa-flip-vertical{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1);-webkit-transform:scale(1, -1);-moz-transform:scale(1, -1);-ms-transform:scale(1, -1);-o-transform:scale(1, -1);transform:scale(1, -1)}
- .fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}
- .fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}
- .fa-stack-1x{line-height:inherit}
- .fa-stack-2x{font-size:2em}
- .fa-inverse{color:#fff}
- .fa-glass:before{content:"\f000"}
- .fa-music:before{content:"\f001"}
- .fa-search:before{content:"\f002"}
- .fa-envelope-o:before{content:"\f003"}
- .fa-heart:before{content:"\f004"}
- .fa-star:before{content:"\f005"}
- .fa-star-o:before{content:"\f006"}
- .fa-user:before{content:"\f007"}
- .fa-film:before{content:"\f008"}
- .fa-th-large:before{content:"\f009"}
- .fa-th:before{content:"\f00a"}
- .fa-th-list:before{content:"\f00b"}
- .fa-check:before{content:"\f00c"}
- .fa-times:before{content:"\f00d"}
- .fa-search-plus:before{content:"\f00e"}
- .fa-search-minus:before{content:"\f010"}
- .fa-power-off:before{content:"\f011"}
- .fa-signal:before{content:"\f012"}
- .fa-gear:before,.fa-cog:before{content:"\f013"}
- .fa-trash-o:before{content:"\f014"}
- .fa-home:before{content:"\f015"}
- .fa-file-o:before{content:"\f016"}
- .fa-clock-o:before{content:"\f017"}
- .fa-road:before{content:"\f018"}
- .fa-download:before{content:"\f019"}
- .fa-arrow-circle-o-down:before{content:"\f01a"}
- .fa-arrow-circle-o-up:before{content:"\f01b"}
- .fa-inbox:before{content:"\f01c"}
- .fa-play-circle-o:before{content:"\f01d"}
- .fa-rotate-right:before,.fa-repeat:before{content:"\f01e"}
- .fa-refresh:before{content:"\f021"}
- .fa-list-alt:before{content:"\f022"}
- .fa-lock:before{content:"\f023"}
- .fa-flag:before{content:"\f024"}
- .fa-headphones:before{content:"\f025"}
- .fa-volume-off:before{content:"\f026"}
- .fa-volume-down:before{content:"\f027"}
- .fa-volume-up:before{content:"\f028"}
- .fa-qrcode:before{content:"\f029"}
- .fa-barcode:before{content:"\f02a"}
- .fa-tag:before{content:"\f02b"}
- .fa-tags:before{content:"\f02c"}
- .fa-book:before{content:"\f02d"}
- .fa-bookmark:before{content:"\f02e"}
- .fa-print:before{content:"\f02f"}
- .fa-camera:before{content:"\f030"}
- .fa-font:before{content:"\f031"}
- .fa-bold:before{content:"\f032"}
- .fa-italic:before{content:"\f033"}
- .fa-text-height:before{content:"\f034"}
- .fa-text-width:before{content:"\f035"}
- .fa-align-left:before{content:"\f036"}
- .fa-align-center:before{content:"\f037"}
- .fa-align-right:before{content:"\f038"}
- .fa-align-justify:before{content:"\f039"}
- .fa-list:before{content:"\f03a"}
- .fa-dedent:before,.fa-outdent:before{content:"\f03b"}
- .fa-indent:before{content:"\f03c"}
- .fa-video-camera:before{content:"\f03d"}
- .fa-picture-o:before{content:"\f03e"}
- .fa-pencil:before{content:"\f040"}
- .fa-map-marker:before{content:"\f041"}
- .fa-adjust:before{content:"\f042"}
- .fa-tint:before{content:"\f043"}
- .fa-edit:before,.fa-pencil-square-o:before{content:"\f044"}
- .fa-share-square-o:before{content:"\f045"}
- .fa-check-square-o:before{content:"\f046"}
- .fa-move:before{content:"\f047"}
- .fa-step-backward:before{content:"\f048"}
- .fa-fast-backward:before{content:"\f049"}
- .fa-backward:before{content:"\f04a"}
- .fa-play:before{content:"\f04b"}
- .fa-pause:before{content:"\f04c"}
- .fa-stop:before{content:"\f04d"}
- .fa-forward:before{content:"\f04e"}
- .fa-fast-forward:before{content:"\f050"}
- .fa-step-forward:before{content:"\f051"}
- .fa-eject:before{content:"\f052"}
- .fa-chevron-left:before{content:"\f053"}
- .fa-chevron-right:before{content:"\f054"}
- .fa-plus-circle:before{content:"\f055"}
- .fa-minus-circle:before{content:"\f056"}
- .fa-times-circle:before{content:"\f057"}
- .fa-check-circle:before{content:"\f058"}
- .fa-question-circle:before{content:"\f059"}
- .fa-info-circle:before{content:"\f05a"}
- .fa-crosshairs:before{content:"\f05b"}
- .fa-times-circle-o:before{content:"\f05c"}
- .fa-check-circle-o:before{content:"\f05d"}
- .fa-ban:before{content:"\f05e"}
- .fa-arrow-left:before{content:"\f060"}
- .fa-arrow-right:before{content:"\f061"}
- .fa-arrow-up:before{content:"\f062"}
- .fa-arrow-down:before{content:"\f063"}
- .fa-mail-forward:before,.fa-share:before{content:"\f064"}
- .fa-resize-full:before{content:"\f065"}
- .fa-resize-small:before{content:"\f066"}
- .fa-plus:before{content:"\f067"}
- .fa-minus:before{content:"\f068"}
- .fa-asterisk:before{content:"\f069"}
- .fa-exclamation-circle:before{content:"\f06a"}
- .fa-gift:before{content:"\f06b"}
- .fa-leaf:before{content:"\f06c"}
- .fa-fire:before{content:"\f06d"}
- .fa-eye:before{content:"\f06e"}
- .fa-eye-slash:before{content:"\f070"}
- .fa-warning:before,.fa-exclamation-triangle:before{content:"\f071"}
- .fa-plane:before{content:"\f072"}
- .fa-calendar:before{content:"\f073"}
- .fa-random:before{content:"\f074"}
- .fa-comment:before{content:"\f075"}
- .fa-magnet:before{content:"\f076"}
- .fa-chevron-up:before{content:"\f077"}
- .fa-chevron-down:before{content:"\f078"}
- .fa-retweet:before{content:"\f079"}
- .fa-shopping-cart:before{content:"\f07a"}
- .fa-folder:before{content:"\f07b"}
- .fa-folder-open:before{content:"\f07c"}
- .fa-resize-vertical:before{content:"\f07d"}
- .fa-resize-horizontal:before{content:"\f07e"}
- .fa-bar-chart-o:before{content:"\f080"}
- .fa-twitter-square:before{content:"\f081"}
- .fa-facebook-square:before{content:"\f082"}
- .fa-camera-retro:before{content:"\f083"}
- .fa-key:before{content:"\f084"}
- .fa-gears:before,.fa-cogs:before{content:"\f085"}
- .fa-comments:before{content:"\f086"}
- .fa-thumbs-o-up:before{content:"\f087"}
- .fa-thumbs-o-down:before{content:"\f088"}
- .fa-star-half:before{content:"\f089"}
- .fa-heart-o:before{content:"\f08a"}
- .fa-sign-out:before{content:"\f08b"}
- .fa-linkedin-square:before{content:"\f08c"}
- .fa-thumb-tack:before{content:"\f08d"}
- .fa-external-link:before{content:"\f08e"}
- .fa-sign-in:before{content:"\f090"}
- .fa-trophy:before{content:"\f091"}
- .fa-github-square:before{content:"\f092"}
- .fa-upload:before{content:"\f093"}
- .fa-lemon-o:before{content:"\f094"}
- .fa-phone:before{content:"\f095"}
- .fa-square-o:before{content:"\f096"}
- .fa-bookmark-o:before{content:"\f097"}
- .fa-phone-square:before{content:"\f098"}
- .fa-twitter:before{content:"\f099"}
- .fa-facebook:before{content:"\f09a"}
- .fa-github:before{content:"\f09b"}
- .fa-unlock:before{content:"\f09c"}
- .fa-credit-card:before{content:"\f09d"}
- .fa-rss:before{content:"\f09e"}
- .fa-hdd-o:before{content:"\f0a0"}
- .fa-bullhorn:before{content:"\f0a1"}
- .fa-bell:before{content:"\f0f3"}
- .fa-certificate:before{content:"\f0a3"}
- .fa-hand-o-right:before{content:"\f0a4"}
- .fa-hand-o-left:before{content:"\f0a5"}
- .fa-hand-o-up:before{content:"\f0a6"}
- .fa-hand-o-down:before{content:"\f0a7"}
- .fa-arrow-circle-left:before{content:"\f0a8"}
- .fa-arrow-circle-right:before{content:"\f0a9"}
- .fa-arrow-circle-up:before{content:"\f0aa"}
- .fa-arrow-circle-down:before{content:"\f0ab"}
- .fa-globe:before{content:"\f0ac"}
- .fa-wrench:before{content:"\f0ad"}
- .fa-tasks:before{content:"\f0ae"}
- .fa-filter:before{content:"\f0b0"}
- .fa-briefcase:before{content:"\f0b1"}
- .fa-fullscreen:before{content:"\f0b2"}
- .fa-group:before{content:"\f0c0"}
- .fa-chain:before,.fa-link:before{content:"\f0c1"}
- .fa-cloud:before{content:"\f0c2"}
- .fa-flask:before{content:"\f0c3"}
- .fa-cut:before,.fa-scissors:before{content:"\f0c4"}
- .fa-copy:before,.fa-files-o:before{content:"\f0c5"}
- .fa-paperclip:before{content:"\f0c6"}
- .fa-save:before,.fa-floppy-o:before{content:"\f0c7"}
- .fa-square:before{content:"\f0c8"}
- .fa-reorder:before{content:"\f0c9"}
- .fa-list-ul:before{content:"\f0ca"}
- .fa-list-ol:before{content:"\f0cb"}
- .fa-strikethrough:before{content:"\f0cc"}
- .fa-underline:before{content:"\f0cd"}
- .fa-table:before{content:"\f0ce"}
- .fa-magic:before{content:"\f0d0"}
- .fa-truck:before{content:"\f0d1"}
- .fa-pinterest:before{content:"\f0d2"}
- .fa-pinterest-square:before{content:"\f0d3"}
- .fa-google-plus-square:before{content:"\f0d4"}
- .fa-google-plus:before{content:"\f0d5"}
- .fa-money:before{content:"\f0d6"}
- .fa-caret-down:before{content:"\f0d7"}
- .fa-caret-up:before{content:"\f0d8"}
- .fa-caret-left:before{content:"\f0d9"}
- .fa-caret-right:before{content:"\f0da"}
- .fa-columns:before{content:"\f0db"}
- .fa-unsorted:before,.fa-sort:before{content:"\f0dc"}
- .fa-sort-down:before,.fa-sort-asc:before{content:"\f0dd"}
- .fa-sort-up:before,.fa-sort-desc:before{content:"\f0de"}
- .fa-envelope:before{content:"\f0e0"}
- .fa-linkedin:before{content:"\f0e1"}
- .fa-rotate-left:before,.fa-undo:before{content:"\f0e2"}
- .fa-legal:before,.fa-gavel:before{content:"\f0e3"}
- .fa-dashboard:before,.fa-tachometer:before{content:"\f0e4"}
- .fa-comment-o:before{content:"\f0e5"}
- .fa-comments-o:before{content:"\f0e6"}
- .fa-flash:before,.fa-bolt:before{content:"\f0e7"}
- .fa-sitemap:before{content:"\f0e8"}
- .fa-umbrella:before{content:"\f0e9"}
- .fa-paste:before,.fa-clipboard:before{content:"\f0ea"}
- .fa-lightbulb-o:before{content:"\f0eb"}
- .fa-exchange:before{content:"\f0ec"}
- .fa-cloud-download:before{content:"\f0ed"}
- .fa-cloud-upload:before{content:"\f0ee"}
- .fa-user-md:before{content:"\f0f0"}
- .fa-stethoscope:before{content:"\f0f1"}
- .fa-suitcase:before{content:"\f0f2"}
- .fa-bell-o:before{content:"\f0a2"}
- .fa-coffee:before{content:"\f0f4"}
- .fa-cutlery:before{content:"\f0f5"}
- .fa-file-text-o:before{content:"\f0f6"}
- .fa-building:before{content:"\f0f7"}
- .fa-hospital:before{content:"\f0f8"}
- .fa-ambulance:before{content:"\f0f9"}
- .fa-medkit:before{content:"\f0fa"}
- .fa-fighter-jet:before{content:"\f0fb"}
- .fa-beer:before{content:"\f0fc"}
- .fa-h-square:before{content:"\f0fd"}
- .fa-plus-square:before{content:"\f0fe"}
- .fa-angle-double-left:before{content:"\f100"}
- .fa-angle-double-right:before{content:"\f101"}
- .fa-angle-double-up:before{content:"\f102"}
- .fa-angle-double-down:before{content:"\f103"}
- .fa-angle-left:before{content:"\f104"}
- .fa-angle-right:before{content:"\f105"}
- .fa-angle-up:before{content:"\f106"}
- .fa-angle-down:before{content:"\f107"}
- .fa-desktop:before{content:"\f108"}
- .fa-laptop:before{content:"\f109"}
- .fa-tablet:before{content:"\f10a"}
- .fa-mobile-phone:before,.fa-mobile:before{content:"\f10b"}
- .fa-circle-o:before{content:"\f10c"}
- .fa-quote-left:before{content:"\f10d"}
- .fa-quote-right:before{content:"\f10e"}
- .fa-spinner:before{content:"\f110"}
- .fa-circle:before{content:"\f111"}
- .fa-mail-reply:before,.fa-reply:before{content:"\f112"}
- .fa-github-alt:before{content:"\f113"}
- .fa-folder-o:before{content:"\f114"}
- .fa-folder-open-o:before{content:"\f115"}
- .fa-expand-o:before{content:"\f116"}
- .fa-collapse-o:before{content:"\f117"}
- .fa-smile-o:before{content:"\f118"}
- .fa-frown-o:before{content:"\f119"}
- .fa-meh-o:before{content:"\f11a"}
- .fa-gamepad:before{content:"\f11b"}
- .fa-keyboard-o:before{content:"\f11c"}
- .fa-flag-o:before{content:"\f11d"}
- .fa-flag-checkered:before{content:"\f11e"}
- .fa-terminal:before{content:"\f120"}
- .fa-code:before{content:"\f121"}
- .fa-reply-all:before{content:"\f122"}
- .fa-mail-reply-all:before{content:"\f122"}
- .fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:"\f123"}
- .fa-location-arrow:before{content:"\f124"}
- .fa-crop:before{content:"\f125"}
- .fa-code-fork:before{content:"\f126"}
- .fa-unlink:before,.fa-chain-broken:before{content:"\f127"}
- .fa-question:before{content:"\f128"}
- .fa-info:before{content:"\f129"}
- .fa-exclamation:before{content:"\f12a"}
- .fa-superscript:before{content:"\f12b"}
- .fa-subscript:before{content:"\f12c"}
- .fa-eraser:before{content:"\f12d"}
- .fa-puzzle-piece:before{content:"\f12e"}
- .fa-microphone:before{content:"\f130"}
- .fa-microphone-slash:before{content:"\f131"}
- .fa-shield:before{content:"\f132"}
- .fa-calendar-o:before{content:"\f133"}
- .fa-fire-extinguisher:before{content:"\f134"}
- .fa-rocket:before{content:"\f135"}
- .fa-maxcdn:before{content:"\f136"}
- .fa-chevron-circle-left:before{content:"\f137"}
- .fa-chevron-circle-right:before{content:"\f138"}
- .fa-chevron-circle-up:before{content:"\f139"}
- .fa-chevron-circle-down:before{content:"\f13a"}
- .fa-html5:before{content:"\f13b"}
- .fa-css3:before{content:"\f13c"}
- .fa-anchor:before{content:"\f13d"}
- .fa-unlock-o:before{content:"\f13e"}
- .fa-bullseye:before{content:"\f140"}
- .fa-ellipsis-horizontal:before{content:"\f141"}
- .fa-ellipsis-vertical:before{content:"\f142"}
- .fa-rss-square:before{content:"\f143"}
- .fa-play-circle:before{content:"\f144"}
- .fa-ticket:before{content:"\f145"}
- .fa-minus-square:before{content:"\f146"}
- .fa-minus-square-o:before{content:"\f147"}
- .fa-level-up:before{content:"\f148"}
- .fa-level-down:before{content:"\f149"}
- .fa-check-square:before{content:"\f14a"}
- .fa-pencil-square:before{content:"\f14b"}
- .fa-external-link-square:before{content:"\f14c"}
- .fa-share-square:before{content:"\f14d"}
- .fa-compass:before{content:"\f14e"}
- .fa-toggle-down:before,.fa-caret-square-o-down:before{content:"\f150"}
- .fa-toggle-up:before,.fa-caret-square-o-up:before{content:"\f151"}
- .fa-toggle-right:before,.fa-caret-square-o-right:before{content:"\f152"}
- .fa-euro:before,.fa-eur:before{content:"\f153"}
- .fa-gbp:before{content:"\f154"}
- .fa-dollar:before,.fa-usd:before{content:"\f155"}
- .fa-rupee:before,.fa-inr:before{content:"\f156"}
- .fa-cny:before,.fa-rmb:before,.fa-yen:before,.fa-jpy:before{content:"\f157"}
- .fa-ruble:before,.fa-rouble:before,.fa-rub:before{content:"\f158"}
- .fa-won:before,.fa-krw:before{content:"\f159"}
- .fa-bitcoin:before,.fa-btc:before{content:"\f15a"}
- .fa-file:before{content:"\f15b"}
- .fa-file-text:before{content:"\f15c"}
- .fa-sort-alpha-asc:before{content:"\f15d"}
- .fa-sort-alpha-desc:before{content:"\f15e"}
- .fa-sort-amount-asc:before{content:"\f160"}
- .fa-sort-amount-desc:before{content:"\f161"}
- .fa-sort-numeric-asc:before{content:"\f162"}
- .fa-sort-numeric-desc:before{content:"\f163"}
- .fa-thumbs-up:before{content:"\f164"}
- .fa-thumbs-down:before{content:"\f165"}
- .fa-youtube-square:before{content:"\f166"}
- .fa-youtube:before{content:"\f167"}
- .fa-xing:before{content:"\f168"}
- .fa-xing-square:before{content:"\f169"}
- .fa-youtube-play:before{content:"\f16a"}
- .fa-dropbox:before{content:"\f16b"}
- .fa-stack-overflow:before{content:"\f16c"}
- .fa-instagram:before{content:"\f16d"}
- .fa-flickr:before{content:"\f16e"}
- .fa-adn:before{content:"\f170"}
- .fa-bitbucket:before{content:"\f171"}
- .fa-bitbucket-square:before{content:"\f172"}
- .fa-tumblr:before{content:"\f173"}
- .fa-tumblr-square:before{content:"\f174"}
- .fa-long-arrow-down:before{content:"\f175"}
- .fa-long-arrow-up:before{content:"\f176"}
- .fa-long-arrow-left:before{content:"\f177"}
- .fa-long-arrow-right:before{content:"\f178"}
- .fa-apple:before{content:"\f179"}
- .fa-windows:before{content:"\f17a"}
- .fa-android:before{content:"\f17b"}
- .fa-linux:before{content:"\f17c"}
- .fa-dribbble:before{content:"\f17d"}
- .fa-skype:before{content:"\f17e"}
- .fa-foursquare:before{content:"\f180"}
- .fa-trello:before{content:"\f181"}
- .fa-female:before{content:"\f182"}
- .fa-male:before{content:"\f183"}
- .fa-gittip:before{content:"\f184"}
- .fa-sun-o:before{content:"\f185"}
- .fa-moon-o:before{content:"\f186"}
- .fa-archive:before{content:"\f187"}
- .fa-bug:before{content:"\f188"}
- .fa-vk:before{content:"\f189"}
- .fa-weibo:before{content:"\f18a"}
- .fa-renren:before{content:"\f18b"}
- .fa-pagelines:before{content:"\f18c"}
- .fa-stack-exchange:before{content:"\f18d"}
- .fa-arrow-circle-o-right:before{content:"\f18e"}
- .fa-arrow-circle-o-left:before{content:"\f190"}
- .fa-toggle-left:before,.fa-caret-square-o-left:before{content:"\f191"}
- .fa-dot-circle-o:before{content:"\f192"}
- .fa-wheelchair:before{content:"\f193"}
- .fa-vimeo-square:before{content:"\f194"}
- .fa-turkish-lira:before,.fa-try:before{content:"\f195"}
diff --git a/utils/soca/fig_gallery/marine_vrfy_display/index_vrfy_marine.html b/utils/soca/fig_gallery/marine_vrfy_display/index_vrfy_marine.html
deleted file mode 100644
index 80fd14b2d..000000000
--- a/utils/soca/fig_gallery/marine_vrfy_display/index_vrfy_marine.html
+++ /dev/null
@@ -1,1243 +0,0 @@
-
-
-
-
-
-GFS Experiment Verification
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Marine Figures
-
Add path to main verification directory:
-
-
- MAIN Verification Path:
-
-
-
Are the figures in your COMROOT or from HPSS? For example:
- If COMROOT, the MAIN path should include everything before /gdas.YearMonthDay subdirectories.
-
- If HPSS, the MAIN path should include everything before /YearMonthDayHour subdirectories.
-
-
- Enter COMROOT or HPSS:
-
-
-
-
Choose a date and cycle time:
-
-
-
-
- Year (YYYY):
-
-
-
-
- Month (MM):
-
-
-
-
- Day (DD):
-
-
-
-
- Hour (HH in UTC):
-
-
- After filling in the above boxes, refresh your tab. Then you can start using the menu on the left.
-
-
-
-
-
-
-
-
-
-
-
This page displays verification for the Global Forecast System (GFS) developer experiments.
-
DISCLAIMER: This web page is not "operational" and therefore not subject to 24-hr monitoring by NCEP's Central Operations staff.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/utils/soca/fig_gallery/run_vrfy.py b/utils/soca/fig_gallery/run_vrfy.py
deleted file mode 100644
index bd99cdb22..000000000
--- a/utils/soca/fig_gallery/run_vrfy.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from jinja2 import Template
-import subprocess
-from datetime import datetime, timedelta
-import yaml
-import sys
-import copy
-import os
-
-def iterate_pdy_range(start_pdy, end_pdy):
- """Generate a range of dates in YYYYMMDD format."""
- start_date = datetime.strptime(start_pdy, "%Y%m%d")
- end_date = datetime.strptime(end_pdy, "%Y%m%d")
- current_date = start_date
-
- while current_date <= end_date:
- yield current_date.strftime("%Y%m%d")
- current_date += timedelta(days=1)
-
-
-def generate_jobcard(template_path, output_path, context):
- # Read the Jinja2 template file
- with open(template_path, 'r') as file:
- template_content = file.read()
-
- # Create a Jinja2 template object
- template = Template(template_content)
-
- # Render the template with custom values
- rendered_script = template.render(**context)
-
- # Write the rendered script to the output file
- with open(output_path, 'w') as file:
- file.write(rendered_script)
-
- print(f"Bash script generated at: {output_path}")
-
-# Example usage
-if __name__ == "__main__":
-
- # Get the YAML configuration file name from the input argument
- if len(sys.argv) != 2:
- print("Usage: python run_vrfy.py ")
- sys.exit(1)
-
- config_file = sys.argv[1]
-
- # Read the YAML template from the file
- with open(config_file, "r") as file:
- yaml_template = file.read()
-
- # Load the template YAML as a dictionary
- template_dict = yaml.safe_load(yaml_template)
-
- # Render the template with Jinja2
- template = Template(yaml_template)
- config = yaml.safe_load(template.render(pslot=template_dict["pslot"]))
-
- # Iterate over the date range
- for pdy in iterate_pdy_range(config['start_pdy'], config['end_pdy']):
- context = copy.deepcopy(config)
- for cyc in config["cycs"]:
- # Update the cycle's date
- context.update({"pdy": pdy, "cyc": cyc})
-
- # Prepare the job card
- template_jobcard = os.path.join(context['homegdas'], 'utils', 'soca', 'fig_gallery', 'vrfy_jobcard.sh.j2') # Assumes a Jinja2 template file in the moegdas directory
- jobcard = f"vrfy_jobcard.{context['pslot']}.{context['pdy']}.{context['cyc']}.sh"
- generate_jobcard(template_jobcard, jobcard, context)
-
- # Submit the plotting job
- subprocess.run(f"sbatch {jobcard}", shell=True)
diff --git a/utils/soca/fig_gallery/vrfy_config.yaml b/utils/soca/fig_gallery/vrfy_config.yaml
deleted file mode 100644
index 0df76cf88..000000000
--- a/utils/soca/fig_gallery/vrfy_config.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-pslot: "nomlb"
-start_pdy: '20210701'
-end_pdy: '20210701'
-cycs: ["00", "06", "12", "18"]
-run: "gdas"
-homegdas: "/work2/noaa/da/gvernier/runs/mlb/GDASApp"
-base_exp_path: "/work2/noaa/da/gvernier/runs/mlb/{{ pslot }}/COMROOT/{{ pslot }}"
-plot_ensemble_b: "OFF"
-plot_parametric_b: "OFF"
-plot_background: "OFF"
-plot_increment: "ON"
-plot_analysis: "OFF"
-eva_plots: "ON"
-qos: "batch"
-hpc: "hercules"
-eva_module: "EVA/orion"
diff --git a/utils/soca/fig_gallery/vrfy_jobcard.sh.j2 b/utils/soca/fig_gallery/vrfy_jobcard.sh.j2
deleted file mode 100644
index 59b8cdef2..000000000
--- a/utils/soca/fig_gallery/vrfy_jobcard.sh.j2
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/bash
-#SBATCH --job-name={{ job_name | default("marine_vrfy") }} # Assign a name to the job (customize as needed)
-#SBATCH --account={{ account | default("da-cpu") }}
-#SBATCH --qos={{ qos | default("debug") }}
-{% set OUTPUT = "vrfy_jobcard." + pslot + "." + pdy + "." + cyc + ".log" %}
-#SBATCH --output={{ OUTPUT }}
-#SBATCH --nodes={{ nodes | default(1) }} # Request 1 node
-#SBATCH --tasks={{ ntasks | default(20) }} # Request total tasks (processors across nodes)
-{% set HPC = hpc | default("hera") %}
-{% if HPC == "hera" %}
-#SBATCH --partition={{ partition | default("hera") }} # Specify the partition (cluster)
-{% endif %}
-#SBATCH --mem={{ memory | default("24GB") }} # Request memory
-#SBATCH --time={{ walltime | default("00:30:00") }} # Set the walltime limit
-
-# Define HOMEgdas
-export HOMEgdas="{{ homegdas }}"
-
-# Load EVA module
-module use ${HOMEgdas}/modulefiles
-module load {{ eva_module | default("EVA/hera") }}
-
-# Set PYTHONPATH using HOMEgfs
-export PYTHONPATH="${HOMEgdas}/ush/:\
-${HOMEgdas}/ush/eva/:\
-${HOMEgdas}/ush/soca/:\
-$PYTHONPATH"
-
-# Set flags to control plotConfig in the Python script
-export PLOT_ENSEMBLE_B={{ plot_ensemble_b | default("OFF") }}
-export PLOT_PARAMETRIC_B={{ plot_parametric_b | default("ON") }}
-export PLOT_BACKGROUND={{ plot_background | default("ON") }}
-export PLOT_INCREMENT={{ plot_increment | default("ON") }}
-export PLOT_ANALYSIS={{ plot_analysis | default("OFF") }}
-export PLOT_ANALYSIS={{ plot_analysis | default("OFF") }}
-export EVA_PLOTS={{ eva_plots | default("OFF") }}
-
-# Define and export the environment variables
-export cyc="{{ cyc }}"
-export RUN="{{ run | default("gdas") }}"
-export PSLOT="{{ pslot }}"
-export PDY="{{ pdy }}"
-
-# Define base experiment path
-BASE_EXP_PATH="{{ base_exp_path }}" # path to the gdas.pdy directory
-
-# Calculate previous date and cycle
-PREV_CYC=$(date -d "{{ pdy }} {{ cyc }} -6 hours" +"%Y%m%d %H")
-PREV_PDY=$(echo $PREV_CYC | cut -d' ' -f1)
-PREV_CYC_HOUR=$(echo $PREV_CYC | cut -d' ' -f2)
-
-# Define and export environment variables with paths
-export COM_OCEAN_ANALYSIS="${BASE_EXP_PATH}/gdas.{{ pdy }}/{{ cyc }}/analysis/ocean"
-export COM_ICE_HISTORY_PREV="${BASE_EXP_PATH}/gdas.${PREV_PDY}/${PREV_CYC_HOUR}/model/ice/history"
-export COM_OCEAN_HISTORY_PREV="${BASE_EXP_PATH}/gdas.${PREV_PDY}/${PREV_CYC_HOUR}/model/ocean/history"
-
-# Execute Marine Verify Analysis
-python3 ${HOMEgdas}/utils/soca/fig_gallery/vrfy_script.py
diff --git a/utils/soca/fig_gallery/vrfy_script.py b/utils/soca/fig_gallery/vrfy_script.py
deleted file mode 100644
index 214c6fa0a..000000000
--- a/utils/soca/fig_gallery/vrfy_script.py
+++ /dev/null
@@ -1,245 +0,0 @@
-import os
-import numpy as np
-import gen_eva_obs_yaml
-import marine_eva_post
-import diag_statistics
-from multiprocessing import Process
-from soca_vrfy import statePlotter, plotConfig
-import subprocess
-
-comout = os.getenv('COM_OCEAN_ANALYSIS')
-com_ice_history = os.getenv('COM_ICE_HISTORY_PREV')
-com_ocean_history = os.getenv('COM_OCEAN_HISTORY_PREV')
-cyc = os.getenv('cyc')
-RUN = os.getenv('RUN')
-
-bcyc = str((int(cyc) - 3) % 24).zfill(2)
-gcyc = str((int(cyc) - 6) % 24).zfill(2)
-grid_file = os.path.join(comout, f'{RUN}.t'+bcyc+'z.ocngrid.nc')
-layer_file = os.path.join(comout, f'{RUN}.t'+cyc+'z.ocninc.nc')
-
-# Check if the file exists, then decide on grid_file
-if not os.path.exists(grid_file):
- # TODO: Make this work on other HPC
- grid_file = '/scratch1/NCEPDEV/da/common/validation/vrfy/gdas.t21z.ocngrid.nc'
-
-# for eva
-diagdir = os.path.join(comout, 'diags')
-HOMEgdas = os.getenv('HOMEgdas')
-
-# Get flags from environment variables (set in the bash driver)
-plot_ensemble_b = os.getenv('PLOT_ENSEMBLE_B', 'OFF').upper() == 'ON'
-plot_parametric_b = os.getenv('PLOT_PARAMETRIC_B', 'OFF').upper() == 'ON'
-plot_background = os.getenv('PLOT_BACKGROUND', 'OFF').upper() == 'ON'
-plot_increment = os.getenv('PLOT_INCREMENT', 'OFF').upper() == 'ON'
-plot_analysis = os.getenv('PLOT_ANALYSIS', 'OFF').upper() == 'ON'
-eva_plots = os.getenv('EVA_PLOTS', 'OFF').upper() == 'ON'
-
-# Initialize an empty list for the main config
-configs = []
-
-# Analysis plotting configuration
-if plot_analysis:
- configs_ana = [plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ocnana.nc'),
- variables_horiz={'ave_ssh': [-1.8, 1.3],
- 'Temp': [-1.8, 34.0],
- 'Salt': [32, 40]},
- colormap='nipy_spectral',
- comout=os.path.join(comout, 'vrfy', 'ana')), # ocean surface analysis
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.iceana.nc'),
- variables_horiz={'aice_h': [0.0, 1.0],
- 'hi_h': [0.0, 4.0],
- 'hs_h': [0.0, 0.5]},
- colormap='jet',
- projs=['North', 'South', 'Global'],
- comout=os.path.join(comout, 'vrfy', 'ana'))] # sea ice analysis
- configs.extend(configs_ana)
-
-# Ensemble B plotting configuration
-if plot_ensemble_b:
- config_ens = [plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.recentering_error.nc'),
- variables_horiz={'ave_ssh': [-1, 1]},
- colormap='seismic',
- comout=os.path.join(comout, 'vrfy', 'recentering_error')), # recentering error
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_steric_stddev.nc'),
- variables_horiz={'ave_ssh': [0, 0.8]},
- colormap='gist_ncar',
- comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_steric_stddev')), # ssh steric stddev
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_unbal_stddev.nc'),
- variables_horiz={'ave_ssh': [0, 0.8]},
- colormap='gist_ncar',
- comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_unbal_stddev')), # ssh unbal stddev
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_total_stddev.nc'),
- variables_horiz={'ave_ssh': [0, 0.8]},
- colormap='gist_ncar',
- comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_total_stddev')), # ssh total stddev
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.steric_explained_variance.nc'),
- variables_horiz={'ave_ssh': [0, 1]},
- colormap='seismic',
- comout=os.path.join(comout, 'vrfy', 'bkgerr', 'steric_explained_variance'))] # steric explained variance
- configs.extend(config_ens)
-
-# Parametric B plotting configuration
-if plot_parametric_b:
- config_bkgerr = [plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, os.path.pardir, os.path.pardir,
- 'bmatrix', 'ice', f'{RUN}.t'+cyc+'z.ice.bkgerr_stddev.nc'),
- variables_horiz={'aice_h': [0.0, 0.5],
- 'hi_h': [0.0, 2.0],
- 'hs_h': [0.0, 0.2]},
- colormap='jet',
- projs=['North', 'South', 'Global'],
- comout=os.path.join(comout, 'vrfy', 'bkgerr')), # sea ice bkgerr stddev
- plotConfig(grid_file=grid_file,
- layer_file=layer_file,
- data_file=os.path.join(comout, os.path.pardir, os.path.pardir,
- 'bmatrix', 'ocean', f'{RUN}.t'+cyc+'z.ocean.bkgerr_stddev.nc'),
- lats=np.arange(-60, 60, 10),
- lons=np.arange(-280, 80, 30),
- variables_zonal={'Temp': [0, 2],
- 'Salt': [0, 0.2],
- 'u': [0, 0.5],
- 'v': [0, 0.5]},
- variables_meridional={'Temp': [0, 2],
- 'Salt': [0, 0.2],
- 'u': [0, 0.5],
- 'v': [0, 0.5]},
- variables_horiz={'Temp': [0, 2],
- 'Salt': [0, 0.2],
- 'u': [0, 0.5],
- 'v': [0, 0.5],
- 'ave_ssh': [0, 0.1]},
- colormap='jet',
- comout=os.path.join(comout, 'vrfy', 'bkgerr'))] # ocn bkgerr stddev
- configs.extend(config_bkgerr)
-
-# Background plotting configuration
-if plot_background:
- config_bkg = [plotConfig(grid_file=grid_file,
- data_file=os.path.join(com_ice_history, f'{RUN}.ice.t{gcyc}z.inst.f006.nc'),
- variables_horiz={'aice_h': [0.0, 1.0],
- 'hi_h': [0.0, 4.0],
- 'hs_h': [0.0, 0.5]},
- colormap='jet',
- projs=['North', 'South', 'Global'],
- comout=os.path.join(comout, 'vrfy', 'bkg')), # sea ice background
- plotConfig(grid_file=grid_file,
- layer_file=layer_file,
- data_file=os.path.join(com_ocean_history, f'{RUN}.ocean.t{gcyc}z.inst.f006.nc'),
- lats=np.arange(-60, 60, 10),
- lons=np.arange(-280, 80, 30),
- variables_zonal={'Temp': [-1.8, 34.0],
- 'Salt': [32, 40],
- 'u': [-1.0, 1.0],
- 'v': [-1.0, 1.0]},
- variables_meridional={'Temp': [-1.8, 34.0],
- 'Salt': [32, 40],
- 'u': [-1.0, 1.0],
- 'v': [-1.0, 1.0]},
- variables_horiz={'ave_ssh': [-1.8, 1.3],
- 'Temp': [-1.8, 34.0],
- 'Salt': [32, 40],
- 'u': [-1.0, 1.0],
- 'v': [-1.0, 1.0]},
- colormap='nipy_spectral',
- comout=os.path.join(comout, 'vrfy', 'bkg'))]
- configs.extend(config_bkg)
-
-# Increment plotting configuration
-if plot_increment:
- config_incr = [plotConfig(grid_file=grid_file,
- layer_file=layer_file,
- data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ocninc.nc'),
- lats=np.arange(-60, 60, 10),
- lons=np.arange(-280, 80, 30),
- variables_zonal={'Temp': [-0.5, 0.5],
- 'Salt': [-0.1, 0.1]},
- variables_horiz={'Temp': [-0.5, 0.5],
- 'Salt': [-0.1, 0.1],
- 'ave_ssh': [-0.1, 0.1]},
- variables_meridional={'Temp': [-0.5, 0.5],
- 'Salt': [-0.1, 0.1]},
- colormap='seismic',
- comout=os.path.join(comout, 'vrfy', 'incr')), # ocean increment
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ice.incr.nc'),
- lats=np.arange(-60, 60, 10),
- variables_horiz={'aice_h': [-0.2, 0.2],
- 'hi_h': [-0.5, 0.5],
- 'hs_h': [-0.1, 0.1]},
- colormap='seismic',
- projs=['North', 'South'],
- comout=os.path.join(comout, 'vrfy', 'incr'))] # sea ice increment
- plotConfig(grid_file=grid_file,
- data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ice.incr.postproc.nc'),
- lats=np.arange(-60, 60, 10),
- variables_horiz={'aice_h': [-0.2, 0.2],
- 'hi_h': [-0.5, 0.5],
- 'hs_h': [-0.1, 0.1]},
- colormap='seismic',
- projs=['North', 'South'],
- comout=os.path.join(comout, 'vrfy', 'incr.postproc'))] # sea ice increment after postprocessing
- configs.extend(config_incr)
-
-
-# Plot the marine verification figures
-def plot_marine_vrfy(config):
- ocnvrfyPlotter = statePlotter(config)
- ocnvrfyPlotter.plot()
-
-# Number of processes
-num_processes = len(configs)
-
-# Create a list to store the processes
-processes = []
-
-# Iterate over configs
-for config in configs[:num_processes]:
- process = Process(target=plot_marine_vrfy, args=(config,))
- process.start()
- processes.append(process)
-
-# Wait for all processes to finish
-for process in processes:
- process.join()
-
-# Run EVA
-if eva_plots:
- evadir = os.path.join(HOMEgdas, 'ush', 'eva')
- marinetemplate = os.path.join(evadir, 'marine_gdas_plots.yaml')
- varyaml = os.path.join(comout, 'yaml', 'var.yaml')
-
- # it would be better to refrence the dirs explicitly with the comout path
- # but eva doesn't allow for specifying output directories
- os.chdir(os.path.join(comout, 'vrfy'))
- if not os.path.exists('preevayamls'):
- os.makedirs('preevayamls')
- if not os.path.exists('evayamls'):
- os.makedirs('evayamls')
-
- gen_eva_obs_yaml.gen_eva_obs_yaml(varyaml, marinetemplate, 'preevayamls')
-
- files = os.listdir('preevayamls')
- for file in files:
- infile = os.path.join('preevayamls', file)
- marine_eva_post.marine_eva_post(infile, 'evayamls', diagdir)
-
- files = os.listdir('evayamls')
- for file in files:
- infile = os.path.join('evayamls', file)
- print('running eva on', infile)
- subprocess.run(['eva', infile], check=True)
-
-#######################################
-# calculate diag statistics
-#######################################
-
-# As of 11/12/2024 not working
-# diag_statistics.get_diag_stats()