diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml
index 44ca92e1d7..aa829c41c3 100644
--- a/satpy/etc/readers/mersi2_l1b.yaml
+++ b/satpy/etc/readers/mersi2_l1b.yaml
@@ -560,7 +560,7 @@ datasets:
 
   longitude:
     name: longitude
-    units: degree_east
+    units: degrees_east
     standard_name: longitude
     resolution:
       1000:
@@ -571,7 +571,7 @@ datasets:
         file_key: Longitude
   latitude:
     name: latitude
-    units: degree_north
+    units: degrees_north
     standard_name: latitude
     resolution:
       1000:
diff --git a/satpy/etc/readers/viirs_edr_active_fires.yaml b/satpy/etc/readers/viirs_edr_active_fires.yaml
index 6a78e75a49..f8845b7093 100644
--- a/satpy/etc/readers/viirs_edr_active_fires.yaml
+++ b/satpy/etc/readers/viirs_edr_active_fires.yaml
@@ -18,10 +18,14 @@ file_types:
       - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc'
   fires_text_img:
     file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler
+    skip_rows: 15
+    columns: ["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"]
     file_patterns:
       - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt'
   fires_text:
     file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler
+    skip_rows: 15
+    columns: ["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"]
     file_patterns:
       - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt'
       - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt'
@@ -32,7 +36,10 @@ datasets:
     file_type: [fires_netcdf_img, fires_text_img]
     file_key: "{variable_prefix}FP_confidence"
     coordinates: [longitude, latitude]
-    units: '[7,8,9]->[lo,med,hi]'
+    units: '1'
+    flag_meanings: ['low', 'medium', 'high']
+    flag_values: [7, 8, 9]
+    _FillValue: 0
   confidence_pct:
     name: confidence_pct
     file_type: [fires_netcdf, fires_text]
@@ -44,13 +51,13 @@ datasets:
     standard_name: longitude
     file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text]
     file_key: "{variable_prefix}FP_longitude"
-    units: 'degrees'
+    units: 'degrees_east'
   latitude:
     name: latitude
     standard_name: latitude
     file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text]
     file_key: "{variable_prefix}FP_latitude"
-    units: 'degrees'
+    units: 'degrees_north'
   power:
     name: power
     file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text]
diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py
index a4d4c047dd..580bd7b45a 100644
--- a/satpy/readers/viirs_edr_active_fires.py
+++ b/satpy/readers/viirs_edr_active_fires.py
@@ -27,19 +27,26 @@
 import dask.dataframe as dd
 import xarray as xr
 
+# map platform attributes to Oscar standard name
+PLATFORM_MAP = {
+    "NPP": "Suomi-NPP",
+    "J01": "NOAA-20",
+    "J02": "NOAA-21"
+}
+
 
 class VIIRSActiveFiresFileHandler(NetCDF4FileHandler):
-    """NetCDF4 reader for VIIRS Active Fires
-    """
+    """NetCDF4 reader for VIIRS Active Fires."""
 
     def __init__(self, filename, filename_info, filetype_info,
                  auto_maskandscale=False, xarray_kwargs=None):
         super(VIIRSActiveFiresFileHandler, self).__init__(
-            filename, filename_info, filetype_info)
+            filename, filename_info, filetype_info,
+            auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs)
         self.prefix = filetype_info.get('variable_prefix')
 
     def get_dataset(self, dsid, dsinfo):
-        """Get dataset function
+        """Get requested data as DataArray.
 
         Args:
             dsid: Dataset ID
@@ -52,11 +59,18 @@ def get_dataset(self, dsid, dsinfo):
         key = dsinfo.get('file_key', dsid.name).format(variable_prefix=self.prefix)
         data = self[key]
 
-        data.attrs.update(dsinfo)
-
-        platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"}
-
-        data.attrs["platform_name"] = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown")
+        # rename "phoney dims"
+        data = data.rename(dict(zip(data.dims, ['y', 'x'])))
+
+        # handle attributes from YAML
+        for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'):
+            # we only want to add information that isn't present already
+            if key in dsinfo and key not in data.attrs:
+                data.attrs[key] = dsinfo[key]
+        if isinstance(data.attrs.get('flag_meanings'), str):
+            data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ')
+
+        data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown")
         data.attrs["sensor"] = "VIIRS"
 
         return data
@@ -79,8 +93,8 @@ def platform_name(self):
 
 
 class VIIRSActiveFiresTextFileHandler(BaseFileHandler):
-    """ASCII reader for VIIRS Active Fires
-    """
+    """ASCII reader for VIIRS Active Fires."""
+
     def __init__(self, filename, filename_info, filetype_info):
         """Makes sure filepath is valid and then reads data into a Dask DataFrame
 
@@ -90,28 +104,23 @@ def __init__(self, filename, filename_info, filetype_info):
             filetype_info: Filetype information
         """
 
-        if filetype_info.get('file_type') == 'fires_text_img':
-            self.file_content = dd.read_csv(filename, skiprows=15, header=None,
-                                            names=["latitude", "longitude",
-                                                   "T4", "Along-scan", "Along-track", "confidence_cat",
-                                                   "power"])
-        else:
-            self.file_content = dd.read_csv(filename, skiprows=15, header=None,
-                                            names=["latitude", "longitude",
-                                                   "T13", "Along-scan", "Along-track", "confidence_pct",
-                                                   "power"])
-
+        skip_rows = filetype_info.get('skip_rows', 15)
+        columns = filetype_info['columns']
+        self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns)
         super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info)
-
-        platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"}
-
-        self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown")
+        self.platform_name = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown")
 
     def get_dataset(self, dsid, dsinfo):
+        """Get requested data as DataArray."""
         ds = self[dsid.name].to_dask_array(lengths=True)
-        data_array = xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"})
-        data_array.attrs.update(dsinfo)
-        return data_array
+        data = xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"})
+        for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'):
+            # we only want to add information that isn't present already
+            if key in dsinfo and key not in data.attrs:
+                data.attrs[key] = dsinfo[key]
+        if isinstance(data.attrs.get('flag_meanings'), str):
+            data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ')
+        return data
 
     @property
     def start_time(self):
diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py
index 88f8464988..4e387e5bb1 100644
--- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py
+++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py
@@ -244,7 +244,9 @@ def test_load_dataset(self):
         datasets = r.load(['confidence_cat'])
         self.assertEqual(len(datasets), 1)
         for v in datasets.values():
-            self.assertEqual(v.attrs['units'], '[7,8,9]->[lo,med,hi]')
+            self.assertEqual(v.attrs['units'], '1')
+            self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high'])
+            self.assertEqual(v.attrs['flag_values'], [7, 8, 9])
 
         datasets = r.load(['T4'])
         self.assertEqual(len(datasets), 1)
@@ -354,7 +356,9 @@ def test_load_dataset(self, mock_obj):
         datasets = r.load(['confidence_cat'])
         self.assertEqual(len(datasets), 1)
         for v in datasets.values():
-            self.assertEqual(v.attrs['units'], '[7,8,9]->[lo,med,hi]')
+            self.assertEqual(v.attrs['units'], '1')
+            self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high'])
+            self.assertEqual(v.attrs['flag_values'], [7, 8, 9])
 
         datasets = r.load(['T4'])
         self.assertEqual(len(datasets), 1)
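
Below is a minimal usage sketch, not part of the patch itself, showing how the updated reader could be exercised through Satpy's Scene API once these changes are applied. The granule filename is purely illustrative (it may not match the reader's file patterns exactly) and the expected attribute values assume an AFIMG text product:

from satpy import Scene

# Hypothetical AFIMG active-fires text granule; substitute a real file path.
filenames = ['AFIMG_npp_d20190101_t0001234_e0005678_b00001_c20190101000000000000_cspp.txt']

scn = Scene(reader='viirs_edr_active_fires', filenames=filenames)
scn.load(['confidence_cat', 'power'])

conf = scn['confidence_cat']
print(conf.attrs['units'])          # '1'
print(conf.attrs['flag_values'])    # [7, 8, 9]
print(conf.attrs['flag_meanings'])  # ['low', 'medium', 'high']
print(conf.attrs['platform_name'])  # 'Suomi-NPP' for an NPP granule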