From 7fff64399f2a4fed2f59037b4f5cda9f0704fa93 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 20 Dec 2018 11:11:45 +0100 Subject: [PATCH 01/18] Make the sar-ice composite more stable and better at showing ice --- satpy/composites/sar.py | 16 +++++++++++----- satpy/etc/enhancements/generic.yaml | 8 +++++--- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/satpy/composites/sar.py b/satpy/composites/sar.py index 15e7740f6f..8d776fefc4 100644 --- a/satpy/composites/sar.py +++ b/satpy/composites/sar.py @@ -31,16 +31,16 @@ LOG = logging.getLogger(__name__) -def overlay(top, bottom): +def overlay(top, bottom, maxval=None): """Blending two layers. from: https://docs.gimp.org/en/gimp-concepts-layer-modes.html """ - maxval = xu.maximum(top.max(), bottom.max()) + if maxval is None: + maxval = xu.maximum(top.max(), bottom.max()) res = ((2 * top / maxval - 1) * bottom + 2 * top) * bottom / maxval - - return res + return res.clip(min=0) class SARIce(GenericCompositor): @@ -49,7 +49,13 @@ class SARIce(GenericCompositor): def __call__(self, projectables, *args, **kwargs): """Create the SAR Ice composite.""" (mhh, mhv) = projectables - green = overlay(mhh, mhv) + ch1attrs = mhh.attrs + ch2attrs = mhv.attrs + mhh = xu.sqrt(mhh ** 2 + 0.002) - 0.04 + mhv = xu.sqrt(mhv ** 2 + 0.002) - 0.04 + mhh.attrs = ch1attrs + mhv.attrs = ch2attrs + green = overlay(mhh, mhv, 30) * 1000 green.attrs = combine_metadata(mhh, mhv) return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 0d61708eff..522a8d8ec0 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -225,12 +225,14 @@ enhancements: - name: stretch method: *stretchfun kwargs: - stretch: linear - cutoffs: [0.2, 0.02] + stretch: crude + min_stretch: [0, 0, 0] + max_stretch: [0.10, 1.37, 0.32 ] + - name: gamma method: *gammafun kwargs: - gamma: [1, 1.2, 1] + gamma: [2, 3, 2] 
sar-rgb: standard_name: sar-rgb From 883bfec532b93eceb0910e5295c3d240d7f58529 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 20 Dec 2018 11:13:56 +0100 Subject: [PATCH 02/18] Add support for SAR-C azimuth noise removal --- satpy/readers/sar_c_safe.py | 39 ++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 4e3a140767..69ecb1e8e8 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -94,6 +94,36 @@ def read_xml_array(elts, variable_name): return np.asarray(data), (x, y) + @staticmethod + def read_azimuth_array(elts): + """Read the azimuth noise vectors.""" + y = [] + x = [] + data = [] + for elt in elts: + first_pixel = int(elt.find('firstRangeSample').text) + last_pixel = int(elt.find('lastRangeSample').text) + lines = elt.find('line').text.split() + lut = elt.find('noiseAzimuthLut').text.split() + pixels = [first_pixel, last_pixel] + swath = elt.find('swath').text + corr = 1 + if swath == 'EW1': + corr = 1.5 + if swath == 'EW4': + corr = 1.2 + if swath == 'EW5': + corr = 1.5 + + for pixel in pixels: + y += [int(val) for val in lines] + x += [pixel] * len(lines) + data += [float(val) * corr for val in lut] + + return np.asarray(data), (x, y) + + + @staticmethod def interpolate_xml_array(data, low_res_coords, shape): """Interpolate arbitrary size dataset to a full sized grid.""" @@ -131,7 +161,14 @@ def get_noise_correction(self, shape): if not data_items: data_items = self.root.findall(".//noiseRangeVector") data, low_res_coords = self.read_xml_array(data_items, 'noiseRangeLut') - return self.interpolate_xml_array(data, low_res_coords, shape) + range_noise = self.interpolate_xml_array(data, low_res_coords, shape) + data_items = self.root.findall(".//noiseAzimuthVector") + data, low_res_coords = self.read_azimuth_array(data_items) + azimuth_noise = self.interpolate_xml_array(data, low_res_coords, shape) + noise = 
range_noise * azimuth_noise + else: + noise = self.interpolate_xml_array(data, low_res_coords, shape) + return noise def get_calibration(self, name, shape): """Get the calibration array.""" From 174f42b6e095f80d9db0ed0f058689a029bd00c2 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 20 Dec 2018 11:16:25 +0100 Subject: [PATCH 03/18] Switch SAR-C reading to using rasterio and pass GCPs --- satpy/readers/sar_c_safe.py | 78 ++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 40 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 69ecb1e8e8..3342df5a52 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -22,11 +22,11 @@ """SAFE SAR-C format.""" import logging -import os import xml.etree.ElementTree as ET import numpy as np -from osgeo import gdal +import rasterio +from rasterio.windows import Window import dask.array as da from xarray import DataArray import xarray.ufuncs as xu @@ -174,7 +174,7 @@ def get_calibration(self, name, shape): """Get the calibration array.""" data_items = self.root.findall(".//calibrationVector") data, low_res_coords = self.read_xml_array(data_items, name) - return self.interpolate_xml_array(data ** 2, low_res_coords, shape) + return self.interpolate_xml_array(data, low_res_coords, shape) def get_calibration_constant(self): """Load the calibration constant.""" @@ -257,19 +257,12 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh): self.lats = None self.lons = None + self.alts = None self.calibration = calfh self.noise = noisefh - self.get_gdal_filehandle() - - def get_gdal_filehandle(self): - """Try to create the filehandle using gdal.""" - if os.path.exists(self.filename): - self.filehandle = gdal.Open(self.filename) - logger.debug("Loading dataset {}".format(self.filename)) - else: - raise IOError("Path {} does not exist.".format(self.filename)) + self.filehandle = rasterio.open(self.filename, 'r') def get_dataset(self, key, 
info): """Load a dataset.""" @@ -282,37 +275,39 @@ def get_dataset(self, key, info): logger.debug('Constructing coordinate arrays.') if self.lons is None or self.lats is None: - self.lons, self.lats = self.get_lonlats() + self.lons, self.lats, self.alts = self.get_lonlatalts() if key.name == 'latitude': data = self.lats else: data = self.lons - data.attrs = info + data.attrs.update(info) else: + calibration = key.calibration or 'gamma' data = self.read_band() logger.debug('Reading noise data.') - noise = self.noise.get_noise_correction(data.shape) + noise = self.noise.get_noise_correction(data.shape).fillna(0) logger.debug('Reading calibration data.') - cal = self.calibration.get_calibration('gamma', data.shape) + cal = self.calibration.get_calibration(calibration, data.shape) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') - + data = data.where(data > 0) data = data.astype(np.float64) - data = (data * data + cal_constant - noise) / cal + dn = data * data + data = ((dn - noise).clip(min=0) + cal_constant) / (cal * cal) # + 0.002 - data = xu.sqrt(data.clip(min=0)) + data = np.sqrt(data.clip(min=0)) - data.attrs = info + data.attrs.update(info) del noise, cal - data.attrs['units'] = 'sigma' + data.attrs['units'] = calibration return data @@ -320,27 +315,27 @@ def read_band(self, blocksize=CHUNK_SIZE): """Read the band in blocks.""" band = self.filehandle - shape = band.RasterYSize, band.RasterXSize + shape = band.shape vchunks = range(0, shape[0], blocksize) hchunks = range(0, shape[1], blocksize) token = tokenize(blocksize, band) name = 'read_band-' + token - dskx = {(name, i, j): (band.GetRasterBand(1).ReadAsArray, - hcs, vcs, - min(blocksize, shape[1] - hcs), - min(blocksize, shape[0] - vcs)) + dskx = {(name, i, j): (band.read, 1, None, + Window(hcs, vcs, + min(blocksize, shape[1] - hcs), + min(blocksize, shape[0] - vcs))) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } res = da.Array(dskx, name, 
shape=list(shape), chunks=(blocksize, blocksize), - dtype=np.uint16) + dtype=band.dtypes[0]) return DataArray(res, dims=('y', 'x')) - def get_lonlats(self): + def get_lonlatalts(self): """Obtain GCPs and construct latitude and longitude arrays. Args: @@ -351,17 +346,20 @@ def get_lonlats(self): """ band = self.filehandle - shape = band.RasterYSize, band.RasterXSize - - (xpoints, ypoints), (gcp_lons, gcp_lats) = self.get_gcps() + (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps = self.get_gcps() # FIXME: do interpolation on cartesion coordinates if the area is # problematic. - longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, shape) - latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, shape) + longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, band.shape) + latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) + altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) + + longitudes.attrs['gcps'] = gcps + latitudes.attrs['gcps'] = gcps + altitudes.attrs['gcps'] = gcps - return longitudes, latitudes + return longitudes, latitudes, altitudes def get_gcps(self): """Read GCP from the GDAL band. 
@@ -375,18 +373,18 @@ def get_gcps(self): gcp_coords (tuple): longitude and latitude 1d arrays """ - gcps = self.filehandle.GetGCPs() + gcps = self.filehandle.gcps - gcp_array = np.array( - [(p.GCPLine, p.GCPPixel, p.GCPY, p.GCPX) for p in gcps]) + gcp_array = np.array([(p.row, p.col, p.x, p.y, p.z) for p in gcps[0]]) ypoints = np.unique(gcp_array[:, 0]) xpoints = np.unique(gcp_array[:, 1]) - gcp_lats = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) - gcp_lons = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) + gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) + gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) + gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0]) - return (xpoints, ypoints), (gcp_lons, gcp_lats) + return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps @property def start_time(self): From 889f80a94063e365444528926bb1d6885080338c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 20 Dec 2018 11:41:51 +0100 Subject: [PATCH 04/18] Remove unneeded comment --- satpy/readers/sar_c_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 3342df5a52..6457fdabbe 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -299,7 +299,7 @@ def get_dataset(self, key, info): data = data.where(data > 0) data = data.astype(np.float64) dn = data * data - data = ((dn - noise).clip(min=0) + cal_constant) / (cal * cal) # + 0.002 + data = ((dn - noise).clip(min=0) + cal_constant) / (cal * cal) data = np.sqrt(data.clip(min=0)) From 5e91f6e9bd2cd1b4708dc8e4e8e254b2e2f7892f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 20 Dec 2018 16:28:14 +0100 Subject: [PATCH 05/18] Add sar-ice-legacy and sar-land composites --- satpy/composites/sar.py | 11 +++++++++++ satpy/etc/composites/sar.yaml | 18 ++++++++++++++++++ satpy/etc/enhancements/generic.yaml | 27 
+++++++++++++++++++++++++++ 3 files changed, 56 insertions(+) diff --git a/satpy/composites/sar.py b/satpy/composites/sar.py index 8d776fefc4..6c078da90a 100644 --- a/satpy/composites/sar.py +++ b/satpy/composites/sar.py @@ -60,6 +60,17 @@ def __call__(self, projectables, *args, **kwargs): return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs) +class SARIceLegacy(GenericCompositor): + """The SAR Ice composite, legacy version with dynamic stretching.""" + + def __call__(self, projectables, *args, **kwargs): + """Create the SAR RGB composite.""" + + (mhh, mhv) = projectables + green = overlay(mhh, mhv) + green.attrs = combine_metadata(mhh, mhv) + + return super(SARIceLegacy, self).__call__((mhv, green, mhh), *args, **kwargs) class SARRGB(GenericCompositor): """The SAR RGB composite.""" diff --git a/satpy/etc/composites/sar.yaml b/satpy/etc/composites/sar.yaml index fcf7198fc9..a2a35b8e47 100644 --- a/satpy/etc/composites/sar.yaml +++ b/satpy/etc/composites/sar.yaml @@ -36,3 +36,21 @@ composites: - name: measurement polarization: hv standard_name: sar-quick + + sar-ice-legacy: + compositor: !!python/name:satpy.composites.sar.SARIceLegacy + prerequisites: + - name: measurement + polarization: hh + - name: measurement + polarization: hv + standard_name: sar-ice-legacy + + sar-land: + compositor: !!python/name:satpy.composites.sar.SARIce + prerequisites: + - name: measurement + polarization: hh + - name: measurement + polarization: hv + standard_name: sar-land diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 522a8d8ec0..246e714d60 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -234,6 +234,33 @@ enhancements: kwargs: gamma: [2, 3, 2] + sar-ice-legacy: + standard_name: sar-ice-legacy + operations: + - name: stretch + method: *stretchfun + kwargs: + stretch: linear + cutoffs: [0.2, 0.02] + - name: gamma + method: *gammafun + kwargs: + gamma: [1, 1.2, 1] + + sar-land: 
+ standard_name: sar-land + operations: + - name: stretch + method: *stretchfun + kwargs: + stretch: crude + min_stretch: [0.01, 1. , 0.15 ] + max_stretch: [0.765, 50., 1.4] + - name: gamma + method: *gammafun + kwargs: + gamma: [1.5, 2.25, 1.5] + sar-rgb: standard_name: sar-rgb operations: From 76a9630a08b0c3c8e0197cce874295e246e70fba Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 21 Dec 2018 10:51:41 +0100 Subject: [PATCH 06/18] Deactivate sharing of filehandles for sar-c reading --- satpy/composites/sar.py | 2 ++ satpy/readers/sar_c_safe.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/composites/sar.py b/satpy/composites/sar.py index 6c078da90a..a1370c54d0 100644 --- a/satpy/composites/sar.py +++ b/satpy/composites/sar.py @@ -60,6 +60,7 @@ def __call__(self, projectables, *args, **kwargs): return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs) + class SARIceLegacy(GenericCompositor): """The SAR Ice composite, legacy version with dynamic stretching.""" @@ -72,6 +73,7 @@ def __call__(self, projectables, *args, **kwargs): return super(SARIceLegacy, self).__call__((mhv, green, mhh), *args, **kwargs) + class SARRGB(GenericCompositor): """The SAR RGB composite.""" diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 6457fdabbe..2d09be286f 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -262,7 +262,7 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh): self.calibration = calfh self.noise = noisefh - self.filehandle = rasterio.open(self.filename, 'r') + self.filehandle = rasterio.open(self.filename, 'r', sharing=False) def get_dataset(self, key, info): """Load a dataset.""" From d51d6d7987bdc933fb7d7ab5805a6c531372c856 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 21 Dec 2018 11:29:31 +0100 Subject: [PATCH 07/18] Thread-lock when reading sar-c data --- satpy/readers/sar_c_safe.py | 20 +++++++++++++++++--- 1 file changed, 17 
insertions(+), 3 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 2d09be286f..d9fafc6531 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -29,13 +29,14 @@ from rasterio.windows import Window import dask.array as da from xarray import DataArray -import xarray.ufuncs as xu from dask.base import tokenize +from threading import Lock from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) +read_lock = Lock() def dictify(r, root=True): @@ -322,14 +323,27 @@ def read_band(self, blocksize=CHUNK_SIZE): token = tokenize(blocksize, band) name = 'read_band-' + token - dskx = {(name, i, j): (band.read, 1, None, + def do_read(the_band, the_window, the_lock): + with the_lock: + return the_band.read(1, None, window=the_window) + + dskx = {(name, i, j): (do_read, band, Window(hcs, vcs, min(blocksize, shape[1] - hcs), - min(blocksize, shape[0] - vcs))) + min(blocksize, shape[0] - vcs)), + read_lock) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } + # dskx = {(name, i, j): (band.read, 1, None, + # Window(hcs, vcs, + # min(blocksize, shape[1] - hcs), + # min(blocksize, shape[0] - vcs))) + # for i, vcs in enumerate(vchunks) + # for j, hcs in enumerate(hchunks) + # } + res = da.Array(dskx, name, shape=list(shape), chunks=(blocksize, blocksize), dtype=band.dtypes[0]) From c5f35e686ad8fbec4903eb4a098169a623375549 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 21 Dec 2018 11:40:13 +0100 Subject: [PATCH 08/18] Lock sar reading at filehandler instance level --- satpy/readers/sar_c_safe.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index d9fafc6531..c87a39849e 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -36,7 +36,6 @@ from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) -read_lock = Lock() def 
dictify(r, root=True): @@ -123,8 +122,6 @@ def read_azimuth_array(elts): return np.asarray(data), (x, y) - - @staticmethod def interpolate_xml_array(data, low_res_coords, shape): """Interpolate arbitrary size dataset to a full sized grid.""" @@ -262,6 +259,7 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh): self.calibration = calfh self.noise = noisefh + self.read_lock = Lock() self.filehandle = rasterio.open(self.filename, 'r', sharing=False) @@ -331,7 +329,7 @@ def do_read(the_band, the_window, the_lock): Window(hcs, vcs, min(blocksize, shape[1] - hcs), min(blocksize, shape[0] - vcs)), - read_lock) + self.read_lock) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } From 300fa4a6cda5b5ff8530a23ebefa0ef143ff1035 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 8 Jan 2019 22:17:39 +0100 Subject: [PATCH 09/18] Add calibration levels to SAR-C data reading and compositing --- satpy/__init__.py | 3 +++ satpy/etc/composites/sar.yaml | 4 ++++ satpy/etc/readers/sar-c_safe.yaml | 18 ++++++++++-------- satpy/readers/sar_c_safe.py | 5 +++++ 4 files changed, 22 insertions(+), 8 deletions(-) diff --git a/satpy/__init__.py b/satpy/__init__.py index d80f83a263..0c5e2c49c9 100644 --- a/satpy/__init__.py +++ b/satpy/__init__.py @@ -40,6 +40,9 @@ 'reflectance', 'radiance', 'counts', + 'gamma', + 'sigma_nought', + 'beta_nought', ] CALIBRATION_ORDER = os.getenv('PYTROLL_CALIBRATION_ORDER', None) if CALIBRATION_ORDER is None: diff --git a/satpy/etc/composites/sar.yaml b/satpy/etc/composites/sar.yaml index a2a35b8e47..73ae04d2d2 100644 --- a/satpy/etc/composites/sar.yaml +++ b/satpy/etc/composites/sar.yaml @@ -6,8 +6,10 @@ composites: prerequisites: - name: measurement polarization: hh + calibration: gamma - name: measurement polarization: hv + calibraion: gamma standard_name: sar-ice sar-ice-iw: @@ -15,8 +17,10 @@ composites: prerequisites: - name: measurement polarization: vv + calibration: gamma - name: measurement polarization: 
vh + calibration: gamma standard_name: sar-ice sar-rgb: diff --git a/satpy/etc/readers/sar-c_safe.yaml b/satpy/etc/readers/sar-c_safe.yaml index 86a288661b..e7d1ccaa09 100644 --- a/satpy/etc/readers/sar-c_safe.yaml +++ b/satpy/etc/readers/sar-c_safe.yaml @@ -47,14 +47,16 @@ datasets: wavelength: [5.400, 5.405, 5.410] resolution: 80 polarization: [hh, hv, vv, vh] - # navigation: esa_geo - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" + calibration: + gamma: + standard_name: backscatter + units: 1 + sigma_nought: + standard_name: backscatter + units: 1 + beta_nought: + standard_name: backscatter + units: 1 coordinates: [longitude, latitude] file_type: safe_measurement diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index c87a39849e..6f295751de 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -284,6 +284,11 @@ def get_dataset(self, key, info): else: calibration = key.calibration or 'gamma' + if calibration == 'sigma_nought': + calibration = 'sigmaNought' + elif calibration == 'beta_nought': + calibration = 'betaNought' + data = self.read_band() logger.debug('Reading noise data.') From 7514ec0121fc427d6ac6bcc4768a5d39156da43c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 14 Jan 2019 19:57:16 +0100 Subject: [PATCH 10/18] Add function to read SAR data using native blocks --- satpy/readers/sar_c_safe.py | 37 ++++++++++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 6f295751de..2984b59f31 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -315,8 +315,35 @@ def get_dataset(self, key, info): return data + def read_band_blocks(self, blocksize=CHUNK_SIZE): + """Read the band in native blocks.""" + # For sentinel 1 data, the block are 1 
line, and dask seems to choke on that. + band = self.filehandle + + shape = band.shape + token = tokenize(blocksize, band) + name = 'read_band-' + token + dskx = dict() + if len(band.block_shapes) != 1: + raise NotImplementedError('Bands with multiple shapes not supported.') + else: + chunks = band.block_shapes[0] + import ipdb; ipdb.set_trace() + + def do_read(the_band, the_window, the_lock): + with the_lock: + return the_band.read(1, None, window=the_window) + + for ji, window in band.block_windows(1): + dskx[(name, ) + ji] = (do_read, band, window, self.read_lock) + + res = da.Array(dskx, name, shape=list(shape), + chunks=chunks, + dtype=band.dtypes[0]) + return DataArray(res, dims=('y', 'x')) + def read_band(self, blocksize=CHUNK_SIZE): - """Read the band in blocks.""" + """Read the band in chunks.""" band = self.filehandle shape = band.shape @@ -339,14 +366,6 @@ def do_read(the_band, the_window, the_lock): for j, hcs in enumerate(hchunks) } - # dskx = {(name, i, j): (band.read, 1, None, - # Window(hcs, vcs, - # min(blocksize, shape[1] - hcs), - # min(blocksize, shape[0] - vcs))) - # for i, vcs in enumerate(vchunks) - # for j, hcs in enumerate(hchunks) - # } - res = da.Array(dskx, name, shape=list(shape), chunks=(blocksize, blocksize), dtype=band.dtypes[0]) From 9a05fa63bdfb4b0ff73a3a3dd4b804dabbcb7032 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 29 Jan 2019 14:14:46 +0100 Subject: [PATCH 11/18] Use better chunking for reading sar data --- satpy/readers/sar_c_safe.py | 65 +++++++++++++++++++++++-------------- 1 file changed, 41 insertions(+), 24 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 2984b59f31..4889b9caa7 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -123,11 +123,11 @@ def read_azimuth_array(elts): return np.asarray(data), (x, y) @staticmethod - def interpolate_xml_array(data, low_res_coords, shape): + def interpolate_xml_array(data, low_res_coords, shape, chunks): 
"""Interpolate arbitrary size dataset to a full sized grid.""" xpoints, ypoints = low_res_coords - return interpolate_xarray_linear(xpoints, ypoints, data, shape) + return interpolate_xarray_linear(xpoints, ypoints, data, shape, chunks=chunks) def get_dataset(self, key, info): """Load a dataset.""" @@ -152,27 +152,27 @@ def get_dataset(self, key, info): data = self.interpolate_xml_array(data, low_res_coords, data.shape) - def get_noise_correction(self, shape): + def get_noise_correction(self, shape, chunks=None): """Get the noise correction array.""" data_items = self.root.findall(".//noiseVector") data, low_res_coords = self.read_xml_array(data_items, 'noiseLut') if not data_items: data_items = self.root.findall(".//noiseRangeVector") data, low_res_coords = self.read_xml_array(data_items, 'noiseRangeLut') - range_noise = self.interpolate_xml_array(data, low_res_coords, shape) + range_noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks) data_items = self.root.findall(".//noiseAzimuthVector") data, low_res_coords = self.read_azimuth_array(data_items) - azimuth_noise = self.interpolate_xml_array(data, low_res_coords, shape) + azimuth_noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks) noise = range_noise * azimuth_noise else: - noise = self.interpolate_xml_array(data, low_res_coords, shape) + noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks) return noise - def get_calibration(self, name, shape): + def get_calibration(self, name, shape, chunks=None): """Get the calibration array.""" data_items = self.root.findall(".//calibrationVector") data, low_res_coords = self.read_xml_array(data_items, name) - return self.interpolate_xml_array(data, low_res_coords, shape) + return self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks) def get_calibration_constant(self): """Load the calibration constant.""" @@ -220,10 +220,16 @@ def interpolate_xarray(xpoints, ypoints, values, shape, 
kind='cubic', return DataArray(res, dims=('y', 'x')) -def interpolate_xarray_linear(xpoints, ypoints, values, shape): +def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): """Interpolate linearly, generating a dask array.""" from scipy.interpolate.interpnd import (LinearNDInterpolator, _ndim_coords_from_arrays) + + if isinstance(chunks, (list, tuple)): + vchunks, hchunks = chunks + else: + vchunks, hchunks = chunks, chunks + points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints), np.asarray(xpoints))).T) @@ -232,8 +238,8 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape): def intp(grid_x, grid_y, interpolator): return interpolator((grid_y, grid_x)) - grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=CHUNK_SIZE), - da.arange(shape[0], chunks=CHUNK_SIZE)) + grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks), + da.arange(shape[0], chunks=vchunks)) # workaround for non-thread-safe first call of the interpolator: interpolator((0, 0)) res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) @@ -290,23 +296,23 @@ def get_dataset(self, key, info): calibration = 'betaNought' data = self.read_band() + # chunks = data.chunks # This seems to be slower for some reason + chunks = CHUNK_SIZE logger.debug('Reading noise data.') - - noise = self.noise.get_noise_correction(data.shape).fillna(0) + noise = self.noise.get_noise_correction(data.shape, chunks=chunks).fillna(0) logger.debug('Reading calibration data.') - cal = self.calibration.get_calibration(calibration, data.shape) + cal = self.calibration.get_calibration(calibration, data.shape, chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') data = data.where(data > 0) data = data.astype(np.float64) dn = data * data - data = ((dn - noise).clip(min=0) + cal_constant) / (cal * cal) - - data = np.sqrt(data.clip(min=0)) + data = ((dn - noise).clip(min=0) + cal_constant) + data = (np.sqrt(data) / 
cal).clip(min=0) data.attrs.update(info) del noise, cal @@ -328,7 +334,6 @@ def read_band_blocks(self, blocksize=CHUNK_SIZE): raise NotImplementedError('Bands with multiple shapes not supported.') else: chunks = band.block_shapes[0] - import ipdb; ipdb.set_trace() def do_read(the_band, the_window, the_lock): with the_lock: @@ -347,10 +352,22 @@ def read_band(self, blocksize=CHUNK_SIZE): band = self.filehandle shape = band.shape - vchunks = range(0, shape[0], blocksize) - hchunks = range(0, shape[1], blocksize) + if len(band.block_shapes) == 1: + total_size = blocksize * blocksize * 1.0 + lines, cols = band.block_shapes[0] + if cols > lines: + hblocks = cols + vblocks = int(total_size / cols / lines) + else: + hblocks = int(total_size / cols / lines) + vblocks = lines + else: + hblocks = blocksize + vblocks = blocksize + vchunks = range(0, shape[0], vblocks) + hchunks = range(0, shape[1], hblocks) - token = tokenize(blocksize, band) + token = tokenize(hblocks, vblocks, band) name = 'read_band-' + token def do_read(the_band, the_window, the_lock): @@ -359,15 +376,15 @@ def do_read(the_band, the_window, the_lock): dskx = {(name, i, j): (do_read, band, Window(hcs, vcs, - min(blocksize, shape[1] - hcs), - min(blocksize, shape[0] - vcs)), + min(hblocks, shape[1] - hcs), + min(vblocks, shape[0] - vcs)), self.read_lock) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } res = da.Array(dskx, name, shape=list(shape), - chunks=(blocksize, blocksize), + chunks=(vblocks, hblocks), dtype=band.dtypes[0]) return DataArray(res, dims=('y', 'x')) From 1e3c220532c0fd98341c6f547637172e23d78f0e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 27 Feb 2019 13:52:55 +0100 Subject: [PATCH 12/18] Split crs and gcps in SAR reader --- satpy/readers/sar_c_safe.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 4889b9caa7..476af2fea9 100644 --- a/satpy/readers/sar_c_safe.py +++ 
b/satpy/readers/sar_c_safe.py @@ -399,7 +399,7 @@ def get_lonlatalts(self): """ band = self.filehandle - (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps = self.get_gcps() + (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() # FIXME: do interpolation on cartesion coordinates if the area is # problematic. @@ -409,8 +409,11 @@ def get_lonlatalts(self): altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) longitudes.attrs['gcps'] = gcps + longitudes.attrs['crs'] = crs latitudes.attrs['gcps'] = gcps + latitudes.attrs['crs'] = crs altitudes.attrs['gcps'] = gcps + altitudes.attrs['crs'] = crs return longitudes, latitudes, altitudes From 018b2a69bb9944f32851de8480ccfb689b1e9844 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Mar 2019 20:03:52 +0100 Subject: [PATCH 13/18] Add tests for the sar-c SAFE reader --- satpy/readers/sar_c_safe.py | 2 +- satpy/tests/reader_tests/__init__.py | 3 +- satpy/tests/reader_tests/test_sar-c_safe.py | 69 +++++++++++++++++++++ 3 files changed, 72 insertions(+), 2 deletions(-) create mode 100644 satpy/tests/reader_tests/test_sar-c_safe.py diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 128560b74d..e13b42fb60 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2016 Martin Raspaud +# Copyright (c) 2016-2019 Pytroll developers # Author(s): diff --git a/satpy/tests/reader_tests/__init__.py b/satpy/tests/reader_tests/__init__.py index 8a2aeba50e..c7f0ab2649 100644 --- a/satpy/tests/reader_tests/__init__.py +++ b/satpy/tests/reader_tests/__init__.py @@ -37,7 +37,7 @@ test_scmi, test_ahi_hrit, test_goes_imager_nc, test_nc_slstr, test_olci_nc, test_viirs_edr_flood, test_nwcsaf_nc, - test_seviri_l1b_hrit) + test_seviri_l1b_hrit, test_sar_c_safe) if sys.version_info < (2, 7): @@ -79,5 +79,6 @@ def suite(): mysuite.addTests(test_nc_slstr.suite()) 
mysuite.addTests(test_nwcsaf_nc.suite()) mysuite.addTests(test_seviri_l1b_hrit.suite()) + mysuite.addTests(test_sar_c_safe.suite()) return mysuite diff --git a/satpy/tests/reader_tests/test_sar-c_safe.py b/satpy/tests/reader_tests/test_sar-c_safe.py new file mode 100644 index 0000000000..f89eacda67 --- /dev/null +++ b/satpy/tests/reader_tests/test_sar-c_safe.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2019 Pytroll developers + +# Author(s): + +# Martin Raspaud + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Module for testing the satpy.readers.sar-c_safe module. 
+""" +import sys + +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest + +try: + import unittest.mock as mock +except ImportError: + import mock + + +class TestSAFEGRD(unittest.TestCase): + """Test various nc_slstr file handlers.""" + @mock.patch('satpy.readers.sar_c_safe.SAFEGRD.get_gdal_filehandle') + @mock.patch('rasterio.open') + def test_instantiate(self, mocked_dataset, ggf): + """Test initialization of file handlers.""" + from satpy.readers.sar_c_safe import SAFEGRD + + filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, + 'polarization': 'vv'} + filetype_info = 'bla' + noisefh = mock.MagicMock() + calfh = mock.MagicMock() + + test = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/s1a-iw-grd' + '-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff', + filename_info, filetype_info, calfh, noisefh) + assert(test._polarization == 'vv') + assert(test.calibration == calfh) + assert(test.noise == noisefh) + ggf.assert_called() + + +def suite(): + """The test suite for test_nc_slstr.""" + loader = unittest.TestLoader() + mysuite = unittest.TestSuite() + mysuite.addTest(loader.loadTestsFromTestCase(TestSAFEGRD)) + return mysuite + + +if __name__ == '__main__': + unittest.main() From 449f941ed011d3980f03e3d05f8444155b0f418d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Mar 2019 22:31:38 +0100 Subject: [PATCH 14/18] Rename SAR-C SAFE test file --- .../tests/reader_tests/{test_sar-c_safe.py => test_sar_c_safe.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename satpy/tests/reader_tests/{test_sar-c_safe.py => test_sar_c_safe.py} (100%) diff --git a/satpy/tests/reader_tests/test_sar-c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py similarity index 100% rename from satpy/tests/reader_tests/test_sar-c_safe.py rename to satpy/tests/reader_tests/test_sar_c_safe.py From 2f77509ee4e5118ed0f8d33b1ab9c7ec2e37af74 Mon Sep 17 
00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Mar 2019 22:52:48 +0100 Subject: [PATCH 15/18] Fix sar reader tests --- satpy/tests/reader_tests/test_sar_c_safe.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index f89eacda67..0b465eee0e 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -36,9 +36,8 @@ class TestSAFEGRD(unittest.TestCase): """Test various nc_slstr file handlers.""" - @mock.patch('satpy.readers.sar_c_safe.SAFEGRD.get_gdal_filehandle') @mock.patch('rasterio.open') - def test_instantiate(self, mocked_dataset, ggf): + def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.sar_c_safe import SAFEGRD @@ -54,7 +53,7 @@ def test_instantiate(self, mocked_dataset, ggf): assert(test._polarization == 'vv') assert(test.calibration == calfh) assert(test.noise == noisefh) - ggf.assert_called() + mocked_dataset.assert_called() def suite(): From 4c94fcd8a462006fc805e8b397a62ed01ad4b317 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 5 Mar 2019 09:18:19 +0100 Subject: [PATCH 16/18] Add documentation to the SAR-C reader --- satpy/readers/sar_c_safe.py | 45 ++++++++++++++++++--- satpy/tests/reader_tests/test_sar_c_safe.py | 2 +- 2 files changed, 41 insertions(+), 6 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index e13b42fb60..c7ab47cc78 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -19,7 +19,26 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""SAFE SAR-C format.""" +"""SAFE SAR-C reader +********************* + +This module implements a reader for Sentinel 1 SAR-C GRD (level1) SAFE format as +provided by ESA. 
The format is comprised of a directory containing multiple +files, most notably two measurement files in geotiff and a few xml files for +calibration, noise and metadata. + +References: + + - *Level 1 Product Formatting* + https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-1-sar/products-algorithms/level-1-product-formatting + + - J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won, + *"Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel,"* + in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, + pp. 1555-1565, March 2018. + doi: `10.1109/TGRS.2017.2765248 `_ + +""" import logging import xml.etree.ElementTree as ET @@ -95,8 +114,19 @@ def read_xml_array(elts, variable_name): return np.asarray(data), (x, y) @staticmethod - def read_azimuth_array(elts): - """Read the azimuth noise vectors.""" + def read_azimuth_noise_array(elts): + """Read the azimuth noise vectors. + + The azimuth noise is normalized per swath to account for gain + differences between the swaths in EW mode. + + This is based on this reference: + J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won, + "Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel," + in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, + pp. 1555-1565, March 2018.
+ doi: 10.1109/TGRS.2017.2765248 + """ y = [] x = [] data = [] @@ -161,7 +191,7 @@ def get_noise_correction(self, shape, chunks=None): data, low_res_coords = self.read_xml_array(data_items, 'noiseRangeLut') range_noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks) data_items = self.root.findall(".//noiseAzimuthVector") - data, low_res_coords = self.read_azimuth_array(data_items) + data, low_res_coords = self.read_azimuth_noise_array(data_items) azimuth_noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks) noise = range_noise * azimuth_noise else: @@ -250,7 +280,12 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE class SAFEGRD(BaseFileHandler): - """Measurement file reader.""" + """Measurement file reader. + + The measurement files are in geotiff format and read using rasterio. For + performance reasons, the reading adapts the chunk size to match the file's + block size. + """ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh): super(SAFEGRD, self).__init__(filename, filename_info, diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 0b465eee0e..c644a5ab0d 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -57,7 +57,7 @@ def test_instantiate(self, mocked_dataset): def suite(): - """The test suite for test_nc_slstr.""" + """The test suite for test_sar_c_safe.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSAFEGRD)) From 3897d28d4086c26f1e7ad296a966e87cd17b926b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 5 Mar 2019 09:18:55 +0100 Subject: [PATCH 17/18] Fix doc formatting in goes nc reader --- satpy/readers/goes_imager_nc.py | 172 +++++++++++++++++++------------- 1 file changed, 102 insertions(+), 70 deletions(-) diff --git a/satpy/readers/goes_imager_nc.py 
b/satpy/readers/goes_imager_nc.py index 31c122fb9e..70a33a8d27 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -66,91 +66,123 @@ maximum calibration coefficients and computing the difference. The maximum differences are: +======= ===== ==== GOES-8 -====== -00_7 0.0 % # Counts are normalized -03_9 0.187 K -06_8 0.0 K # only one detector -10_7 0.106 K -12_0 0.036 K - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 0.0 % # Counts are normalized +03_9 0.187 K +06_8 0.0 K # only one detector +10_7 0.106 K +12_0 0.036 K +======= ===== ==== + +======= ===== ==== GOES-9 -======== -00_7 0.0 % # Counts are normalized -03_9 0.0 K # coefs identical -06_8 0.0 K # only one detector -10_7 0.021 K -12_0 0.006 K - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 0.0 % # Counts are normalized +03_9 0.0 K # coefs identical +06_8 0.0 K # only one detector +10_7 0.021 K +12_0 0.006 K +======= ===== ==== + +======= ===== ==== GOES-10 -======== -00_7 1.05 % -03_9 0.0 K # coefs identical -06_8 0.0 K # only one detector -10_7 0.013 K -12_0 0.004 K - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 1.05 % +03_9 0.0 K # coefs identical +06_8 0.0 K # only one detector +10_7 0.013 K +12_0 0.004 K +======= ===== ==== + +======= ===== ==== GOES-11 -======== -00_7 1.25 % -03_9 0.0 K # coefs identical -06_8 0.0 K # only one detector -10_7 0.0 K # coefs identical -12_0 0.065 K - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 1.25 % +03_9 0.0 K # coefs identical +06_8 0.0 K # only one detector +10_7 0.0 K # coefs identical +12_0 0.065 K +======= ===== ==== + +======= ===== ==== GOES-12 -======== -00_7 0.8 % -03_9 0.0 K # coefs identical -06_5 0.044 K -10_7 0.0 K # coefs identical -13_3 0.0 K # only one detector - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 0.8 % +03_9 0.0 K # coefs identical +06_5 0.044 K +10_7 0.0 K # coefs identical +13_3 0.0 K # only one detector +======= 
===== ==== + +======= ===== ==== GOES-13 -======== -00_7 1.31 % -03_9 0.0 K # coefs identical -06_5 0.085 K -10_7 0.008 K -13_3 0.0 K # only one detector - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 1.31 % +03_9 0.0 K # coefs identical +06_5 0.085 K +10_7 0.008 K +13_3 0.0 K # only one detector +======= ===== ==== + +======= ===== ==== GOES-14 -======== -00_7 0.66 % -03_9 0.0 K # coefs identical -06_5 0.043 K -10_7 0.006 K -13_3 0.003 K - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 0.66 % +03_9 0.0 K # coefs identical +06_5 0.043 K +10_7 0.006 K +13_3 0.003 K +======= ===== ==== + +======= ===== ==== GOES-15 -======== -00_7 0.86 % -03_9 0.0 K # coefs identical -06_5 0.02 K -10_7 0.009 K -13_3 0.008 K - +------------------ +Channel Diff Unit +======= ===== ==== +00_7 0.86 % +03_9 0.0 K # coefs identical +06_5 0.02 K +10_7 0.009 K +13_3 0.008 K +======= ===== ==== References: -[GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf -[BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf -[BOOK-I] https://goes.gsfc.nasa.gov/text/databook/databook.pdf -[IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html -[VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html -[FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf -[SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html -[SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html +- [GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf +- [BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf +- [BOOK-I] https://goes.gsfc.nasa.gov/text/databook/databook.pdf +- [IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html +- [VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html +- [FAQ] 
https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf +- [SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html +- [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html Eumetsat formated netCDF data: The main differences are: -1: The geolocation is in a separate file, used for all bands -2: VIS data is calibrated to Albedo (or reflectance) -3: IR data is calibrated to radiance. -4: VIS data is downsampled to IR resolution (4km) -5: File name differs also slightly -6: Data is received via EumetCast + +1. The geolocation is in a separate file, used for all bands +2. VIS data is calibrated to Albedo (or reflectance) +3. IR data is calibrated to radiance. +4. VIS data is downsampled to IR resolution (4km) +5. File name differs also slightly +6. Data is received via EumetCast """ From c9b78c54ddeccf98f74b8f0141a5f5591e8b1ea0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2019 09:55:49 +0100 Subject: [PATCH 18/18] Remove the use of xu for sar compositing --- satpy/composites/sar.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/composites/sar.py b/satpy/composites/sar.py index a1370c54d0..87f07a7afe 100644 --- a/satpy/composites/sar.py +++ b/satpy/composites/sar.py @@ -24,7 +24,7 @@ import logging -import xarray.ufuncs as xu +import numpy as np from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata @@ -37,7 +37,7 @@ def overlay(top, bottom, maxval=None): from: https://docs.gimp.org/en/gimp-concepts-layer-modes.html """ if maxval is None: - maxval = xu.maximum(top.max(), bottom.max()) + maxval = np.maximum(top.max(), bottom.max()) res = ((2 * top / maxval - 1) * bottom + 2 * top) * bottom / maxval return res.clip(min=0) @@ -51,8 +51,8 @@ def __call__(self, projectables, *args, **kwargs): (mhh, mhv) = projectables ch1attrs = mhh.attrs ch2attrs = mhv.attrs - mhh = xu.sqrt(mhh ** 2 + 0.002) - 0.04 - mhv 
= xu.sqrt(mhv ** 2 + 0.002) - 0.04 + mhh = np.sqrt(mhh ** 2 + 0.002) - 0.04 + mhv = np.sqrt(mhv ** 2 + 0.002) - 0.04 mhh.attrs = ch1attrs mhv.attrs = ch2attrs green = overlay(mhh, mhv, 30) * 1000