diff --git a/satpy/__init__.py b/satpy/__init__.py
index d80f83a263..0c5e2c49c9 100644
--- a/satpy/__init__.py
+++ b/satpy/__init__.py
@@ -40,6 +40,9 @@
'reflectance',
'radiance',
'counts',
+ 'gamma',
+ 'sigma_nought',
+ 'beta_nought',
]
CALIBRATION_ORDER = os.getenv('PYTROLL_CALIBRATION_ORDER', None)
if CALIBRATION_ORDER is None:
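The three new entries extend satpy's calibration priority list. A minimal sketch of how such an ordered list can be turned into a ranking, assuming the surrounding code simply ranks entries by position and allows a space-separated override through `PYTROLL_CALIBRATION_ORDER` (only the names visible above are used here; the rest of the list is omitted):

```python
import os

# Tail of the priority list as shown above, including the new SAR calibrations.
calibrations = ['reflectance', 'radiance', 'counts',
                'gamma', 'sigma_nought', 'beta_nought']

# Hypothetical override, e.g. PYTROLL_CALIBRATION_ORDER="sigma_nought gamma counts".
override = os.getenv('PYTROLL_CALIBRATION_ORDER')
order = override.split() if override else calibrations

# Earlier entries win when several calibrations are available for a dataset.
rank = {cal: idx for idx, cal in enumerate(order)}
```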
diff --git a/satpy/composites/sar.py b/satpy/composites/sar.py
index 15e7740f6f..87f07a7afe 100644
--- a/satpy/composites/sar.py
+++ b/satpy/composites/sar.py
@@ -24,23 +24,23 @@
import logging
-import xarray.ufuncs as xu
+import numpy as np
from satpy.composites import GenericCompositor
from satpy.dataset import combine_metadata
LOG = logging.getLogger(__name__)
-def overlay(top, bottom):
+def overlay(top, bottom, maxval=None):
"""Blending two layers.
from: https://docs.gimp.org/en/gimp-concepts-layer-modes.html
"""
- maxval = xu.maximum(top.max(), bottom.max())
+ if maxval is None:
+ maxval = np.maximum(top.max(), bottom.max())
res = ((2 * top / maxval - 1) * bottom + 2 * top) * bottom / maxval
-
- return res
+ return res.clip(min=0)
class SARIce(GenericCompositor):
@@ -49,12 +49,31 @@ class SARIce(GenericCompositor):
def __call__(self, projectables, *args, **kwargs):
"""Create the SAR Ice composite."""
(mhh, mhv) = projectables
- green = overlay(mhh, mhv)
+ ch1attrs = mhh.attrs
+ ch2attrs = mhv.attrs
+ mhh = np.sqrt(mhh ** 2 + 0.002) - 0.04
+ mhv = np.sqrt(mhv ** 2 + 0.002) - 0.04
+ mhh.attrs = ch1attrs
+ mhv.attrs = ch2attrs
+ green = overlay(mhh, mhv, 30) * 1000
green.attrs = combine_metadata(mhh, mhv)
return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs)
+class SARIceLegacy(GenericCompositor):
+ """The SAR Ice composite, legacy version with dynamic stretching."""
+
+ def __call__(self, projectables, *args, **kwargs):
+ """Create the SAR RGB composite."""
+
+ (mhh, mhv) = projectables
+ green = overlay(mhh, mhv)
+ green.attrs = combine_metadata(mhh, mhv)
+
+ return super(SARIceLegacy, self).__call__((mhv, green, mhh), *args, **kwargs)
+
+
class SARRGB(GenericCompositor):
"""The SAR RGB composite."""
diff --git a/satpy/etc/composites/sar.yaml b/satpy/etc/composites/sar.yaml
index fcf7198fc9..73ae04d2d2 100644
--- a/satpy/etc/composites/sar.yaml
+++ b/satpy/etc/composites/sar.yaml
@@ -6,8 +6,10 @@ composites:
prerequisites:
- name: measurement
polarization: hh
+ calibration: gamma
- name: measurement
polarization: hv
+ calibration: gamma
standard_name: sar-ice
sar-ice-iw:
@@ -15,8 +17,10 @@ composites:
prerequisites:
- name: measurement
polarization: vv
+ calibration: gamma
- name: measurement
polarization: vh
+ calibration: gamma
standard_name: sar-ice
sar-rgb:
@@ -36,3 +40,21 @@ composites:
- name: measurement
polarization: hv
standard_name: sar-quick
+
+ sar-ice-legacy:
+ compositor: !!python/name:satpy.composites.sar.SARIceLegacy
+ prerequisites:
+ - name: measurement
+ polarization: hh
+ - name: measurement
+ polarization: hv
+ standard_name: sar-ice-legacy
+
+ sar-land:
+ compositor: !!python/name:satpy.composites.sar.SARIce
+ prerequisites:
+ - name: measurement
+ polarization: hh
+ - name: measurement
+ polarization: hv
+ standard_name: sar-land
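A usage sketch for the new composites (paths hypothetical; `find_files_and_readers` collects the measurement, calibration and noise files of the SAFE products found under the base directory):

```python
from satpy import Scene, find_files_and_readers

# Hypothetical directory holding one or more unpacked Sentinel-1 GRD .SAFE products.
files = find_files_and_readers(base_dir='/data/sentinel1', reader='sar-c_safe')
scn = Scene(filenames=files)

scn.load(['sar-ice', 'sar-land'])
scn.show('sar-ice')
```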
diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml
index 0d61708eff..246e714d60 100644
--- a/satpy/etc/enhancements/generic.yaml
+++ b/satpy/etc/enhancements/generic.yaml
@@ -222,6 +222,21 @@ enhancements:
sar-ice:
standard_name: sar-ice
operations:
+ - name: stretch
+ method: *stretchfun
+ kwargs:
+ stretch: crude
+ min_stretch: [0, 0, 0]
+ max_stretch: [0.10, 1.37, 0.32]
+
+ - name: gamma
+ method: *gammafun
+ kwargs:
+ gamma: [2, 3, 2]
+
+ sar-ice-legacy:
+ standard_name: sar-ice-legacy
+ operations:
- name: stretch
method: *stretchfun
kwargs:
@@ -232,6 +247,20 @@ enhancements:
kwargs:
gamma: [1, 1.2, 1]
+ sar-land:
+ standard_name: sar-land
+ operations:
+ - name: stretch
+ method: *stretchfun
+ kwargs:
+ stretch: crude
+ min_stretch: [0.01, 1.0, 0.15]
+ max_stretch: [0.765, 50.0, 1.4]
+ - name: gamma
+ method: *gammafun
+ kwargs:
+ gamma: [1.5, 2.25, 1.5]
+
sar-rgb:
standard_name: sar-rgb
operations:
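For reference, the crude stretch followed by a gamma is effectively a linear rescale to [0, 1] and a power law per channel; a rough NumPy equivalent, ignoring masking and the details of the trollimage implementation:

```python
import numpy as np

def crude_stretch_then_gamma(channel, lo, hi, gamma):
    """Approximate a crude stretch to [0, 1] followed by a gamma correction."""
    scaled = np.clip((channel - lo) / (hi - lo), 0.0, 1.0)
    return scaled ** (1.0 / gamma)

# Green channel of the new sar-ice recipe: range [0, 1.37], gamma 3.
green = np.array([0.0, 0.5, 1.37, 2.0])
print(crude_stretch_then_gamma(green, 0.0, 1.37, 3))
```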
diff --git a/satpy/etc/readers/sar-c_safe.yaml b/satpy/etc/readers/sar-c_safe.yaml
index 86a288661b..e7d1ccaa09 100644
--- a/satpy/etc/readers/sar-c_safe.yaml
+++ b/satpy/etc/readers/sar-c_safe.yaml
@@ -47,14 +47,16 @@ datasets:
wavelength: [5.400, 5.405, 5.410]
resolution: 80
polarization: [hh, hv, vv, vh]
- # navigation: esa_geo
- # calibration:
- # radiance:
- # standard_name: toa_outgoing_radiance_per_unit_wavelength
- # units: W m-2 um-1 sr-1
- # reflectance:
- # standard_name: toa_bidirectional_reflectance
- # units: "%"
+ calibration:
+ gamma:
+ standard_name: backscatter
+ units: 1
+ sigma_nought:
+ standard_name: backscatter
+ units: 1
+ beta_nought:
+ standard_name: backscatter
+ units: 1
coordinates: [longitude, latitude]
file_type: safe_measurement
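With the calibration levels declared in the reader configuration, a specific one can be requested at load time; a hedged sketch (paths hypothetical):

```python
from satpy import Scene, find_files_and_readers

files = find_files_and_readers(base_dir='/data/sentinel1', reader='sar-c_safe')
scn = Scene(filenames=files)

# 'gamma' is the reader's fallback; sigma or beta nought can be requested explicitly.
scn.load(['measurement'], polarization='hv', calibration='sigma_nought')
```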
diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py
index 31c122fb9e..70a33a8d27 100644
--- a/satpy/readers/goes_imager_nc.py
+++ b/satpy/readers/goes_imager_nc.py
@@ -66,91 +66,123 @@
maximum calibration coefficients and computing the difference. The maximum
differences are:
+======= ===== ====
GOES-8
-======
-00_7 0.0 % # Counts are normalized
-03_9 0.187 K
-06_8 0.0 K # only one detector
-10_7 0.106 K
-12_0 0.036 K
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    0.0   %     # Counts are normalized
+03_9    0.187 K
+06_8    0.0   K     # only one detector
+10_7    0.106 K
+12_0    0.036 K
+======= ===== ====
+
+======= ===== ====
GOES-9
-========
-00_7 0.0 % # Counts are normalized
-03_9 0.0 K # coefs identical
-06_8 0.0 K # only one detector
-10_7 0.021 K
-12_0 0.006 K
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    0.0   %     # Counts are normalized
+03_9    0.0   K     # coefs identical
+06_8    0.0   K     # only one detector
+10_7    0.021 K
+12_0    0.006 K
+======= ===== ====
+
+======= ===== ====
GOES-10
-========
-00_7 1.05 %
-03_9 0.0 K # coefs identical
-06_8 0.0 K # only one detector
-10_7 0.013 K
-12_0 0.004 K
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    1.05  %
+03_9    0.0   K     # coefs identical
+06_8    0.0   K     # only one detector
+10_7    0.013 K
+12_0    0.004 K
+======= ===== ====
+
+======= ===== ====
GOES-11
-========
-00_7 1.25 %
-03_9 0.0 K # coefs identical
-06_8 0.0 K # only one detector
-10_7 0.0 K # coefs identical
-12_0 0.065 K
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    1.25  %
+03_9    0.0   K     # coefs identical
+06_8    0.0   K     # only one detector
+10_7    0.0   K     # coefs identical
+12_0    0.065 K
+======= ===== ====
+
+======= ===== ====
GOES-12
-========
-00_7 0.8 %
-03_9 0.0 K # coefs identical
-06_5 0.044 K
-10_7 0.0 K # coefs identical
-13_3 0.0 K # only one detector
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    0.8   %
+03_9    0.0   K     # coefs identical
+06_5    0.044 K
+10_7    0.0   K     # coefs identical
+13_3    0.0   K     # only one detector
+======= ===== ====
+
+======= ===== ====
GOES-13
-========
-00_7 1.31 %
-03_9 0.0 K # coefs identical
-06_5 0.085 K
-10_7 0.008 K
-13_3 0.0 K # only one detector
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    1.31  %
+03_9    0.0   K     # coefs identical
+06_5    0.085 K
+10_7    0.008 K
+13_3    0.0   K     # only one detector
+======= ===== ====
+
+======= ===== ====
GOES-14
-========
-00_7 0.66 %
-03_9 0.0 K # coefs identical
-06_5 0.043 K
-10_7 0.006 K
-13_3 0.003 K
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    0.66  %
+03_9    0.0   K     # coefs identical
+06_5    0.043 K
+10_7    0.006 K
+13_3    0.003 K
+======= ===== ====
+
+======= ===== ====
GOES-15
-========
-00_7 0.86 %
-03_9 0.0 K # coefs identical
-06_5 0.02 K
-10_7 0.009 K
-13_3 0.008 K
-
+------------------
+Channel Diff  Unit
+======= ===== ====
+00_7    0.86  %
+03_9    0.0   K     # coefs identical
+06_5    0.02  K
+10_7    0.009 K
+13_3    0.008 K
+======= ===== ====
References:
-[GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf
-[BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf
-[BOOK-I] https://goes.gsfc.nasa.gov/text/databook/databook.pdf
-[IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html
-[VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html
-[FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf
-[SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html
-[SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html
+- [GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf
+- [BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf
+- [BOOK-I] https://goes.gsfc.nasa.gov/text/databook/databook.pdf
+- [IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html
+- [VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html
+- [FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf
+- [SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html
+- [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html
EUMETSAT-formatted netCDF data:
The main differences are:
-1: The geolocation is in a separate file, used for all bands
-2: VIS data is calibrated to Albedo (or reflectance)
-3: IR data is calibrated to radiance.
-4: VIS data is downsampled to IR resolution (4km)
-5: File name differs also slightly
-6: Data is received via EumetCast
+
+1. The geolocation is in a separate file, used for all bands.
+2. VIS data is calibrated to albedo (or reflectance).
+3. IR data is calibrated to radiance.
+4. VIS data is downsampled to the IR resolution (4 km).
+5. The file naming also differs slightly.
+6. Data is received via EUMETCast.
"""
diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py
index 46405c741a..c7ab47cc78 100644
--- a/satpy/readers/sar_c_safe.py
+++ b/satpy/readers/sar_c_safe.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Martin Raspaud
+# Copyright (c) 2016-2019 Pytroll developers
# Author(s):
@@ -19,18 +19,37 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""SAFE SAR-C format."""
+"""SAFE SAR-C reader
+*********************
+
+This module implements a reader for the Sentinel-1 SAR-C GRD (Level 1) SAFE format
+as provided by ESA. The format consists of a directory containing multiple files,
+most notably the measurement files (one GeoTIFF per polarization) and a few XML
+files for calibration, noise and metadata.
+
+References:
+
+ - *Level 1 Product Formatting*
+ https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-1-sar/products-algorithms/level-1-product-formatting
+
+ - J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won,
+ *"Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel,"*
+ in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3,
+ pp. 1555-1565, March 2018.
+ doi: `10.1109/TGRS.2017.2765248 <https://doi.org/10.1109/TGRS.2017.2765248>`_
+
+"""
import logging
-import os
import xml.etree.ElementTree as ET
import numpy as np
-from osgeo import gdal
+import rasterio
+from rasterio.windows import Window
import dask.array as da
from xarray import DataArray
-import xarray.ufuncs as xu
from dask.base import tokenize
+from threading import Lock
from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
@@ -95,11 +114,50 @@ def read_xml_array(elts, variable_name):
return np.asarray(data), (x, y)
@staticmethod
- def interpolate_xml_array(data, low_res_coords, shape):
+ def read_azimuth_noise_array(elts):
+ """Read the azimuth noise vectors.
+
+ The azimuth noise is normalized per swath to account for gain
+ differences between the swaths in EW mode.
+
+ This is based on this reference:
+ J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won,
+ "Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel,"
+ in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3,
+ pp. 1555-1565, March 2018.
+ doi: 10.1109/TGRS.2017.2765248
+ """
+ y = []
+ x = []
+ data = []
+ for elt in elts:
+ first_pixel = int(elt.find('firstRangeSample').text)
+ last_pixel = int(elt.find('lastRangeSample').text)
+ lines = elt.find('line').text.split()
+ lut = elt.find('noiseAzimuthLut').text.split()
+ pixels = [first_pixel, last_pixel]
+ swath = elt.find('swath').text
+ corr = 1
+ if swath == 'EW1':
+ corr = 1.5
+ if swath == 'EW4':
+ corr = 1.2
+ if swath == 'EW5':
+ corr = 1.5
+
+ for pixel in pixels:
+ y += [int(val) for val in lines]
+ x += [pixel] * len(lines)
+ data += [float(val) * corr for val in lut]
+
+ return np.asarray(data), (x, y)
+
+ @staticmethod
+ def interpolate_xml_array(data, low_res_coords, shape, chunks):
"""Interpolate arbitrary size dataset to a full sized grid."""
xpoints, ypoints = low_res_coords
- return interpolate_xarray_linear(xpoints, ypoints, data, shape)
+ return interpolate_xarray_linear(xpoints, ypoints, data, shape, chunks=chunks)
def get_dataset(self, key, info):
"""Load a dataset."""
@@ -124,20 +182,27 @@ def get_dataset(self, key, info):
data = self.interpolate_xml_array(data, low_res_coords, data.shape)
- def get_noise_correction(self, shape):
+ def get_noise_correction(self, shape, chunks=None):
"""Get the noise correction array."""
data_items = self.root.findall(".//noiseVector")
data, low_res_coords = self.read_xml_array(data_items, 'noiseLut')
if not data_items:
data_items = self.root.findall(".//noiseRangeVector")
data, low_res_coords = self.read_xml_array(data_items, 'noiseRangeLut')
- return self.interpolate_xml_array(data, low_res_coords, shape)
+ range_noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks)
+ data_items = self.root.findall(".//noiseAzimuthVector")
+ data, low_res_coords = self.read_azimuth_noise_array(data_items)
+ azimuth_noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks)
+ noise = range_noise * azimuth_noise
+ else:
+ noise = self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks)
+ return noise
- def get_calibration(self, name, shape):
+ def get_calibration(self, name, shape, chunks=None):
"""Get the calibration array."""
data_items = self.root.findall(".//calibrationVector")
data, low_res_coords = self.read_xml_array(data_items, name)
- return self.interpolate_xml_array(data ** 2, low_res_coords, shape)
+ return self.interpolate_xml_array(data, low_res_coords, shape, chunks=chunks)
def get_calibration_constant(self):
"""Load the calibration constant."""
@@ -189,16 +254,24 @@ def intp(grid_x, grid_y, interpolator):
return interpolator((grid_y, grid_x))
-def interpolate_xarray_linear(xpoints, ypoints, values, shape):
+def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE):
"""Interpolate linearly, generating a dask array."""
from scipy.interpolate.interpnd import (LinearNDInterpolator,
_ndim_coords_from_arrays)
+
+ if isinstance(chunks, (list, tuple)):
+ vchunks, hchunks = chunks
+ else:
+ vchunks, hchunks = chunks, chunks
+
points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints),
np.asarray(xpoints))).T)
interpolator = LinearNDInterpolator(points, values)
- grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=CHUNK_SIZE),
- da.arange(shape[0], chunks=CHUNK_SIZE))
+
+ grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks),
+ da.arange(shape[0], chunks=vchunks))
+
# workaround for non-thread-safe first call of the interpolator:
interpolator((0, 0))
res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator)
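A minimal usage sketch for the chunked interpolation helper, with a synthetic 3x3 grid of control points flattened the way the XML vectors are read (the result is evaluated lazily through dask):

```python
import numpy as np
from satpy.readers.sar_c_safe import interpolate_xarray_linear

# Nine control points on a coarse 3x3 grid covering a 100x100 image.
xpoints = np.tile([0, 50, 99], 3)
ypoints = np.repeat([0, 50, 99], 3)
values = np.arange(9, dtype=float)

field = interpolate_xarray_linear(xpoints, ypoints, values,
                                  shape=(100, 100), chunks=50)
print(field.shape)  # (100, 100), backed by 50x50 dask chunks
```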
@@ -207,7 +280,12 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape):
class SAFEGRD(BaseFileHandler):
- """Measurement file reader."""
+ """Measurement file reader.
+
+ The measurement files are in GeoTIFF format and are read using rasterio. For
+ performance reasons, the reading adapts the dask chunk size to match the file's
+ native block size.
+ """
def __init__(self, filename, filename_info, filetype_info, calfh, noisefh):
super(SAFEGRD, self).__init__(filename, filename_info,
@@ -220,19 +298,13 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh):
self.lats = None
self.lons = None
+ self.alts = None
self.calibration = calfh
self.noise = noisefh
+ self.read_lock = Lock()
- self.get_gdal_filehandle()
-
- def get_gdal_filehandle(self):
- """Try to create the filehandle using gdal."""
- if os.path.exists(self.filename):
- self.filehandle = gdal.Open(self.filename)
- logger.debug("Loading dataset {}".format(self.filename))
- else:
- raise IOError("Path {} does not exist.".format(self.filename))
+ self.filehandle = rasterio.open(self.filename, 'r', sharing=False)
def get_dataset(self, key, info):
"""Load a dataset."""
@@ -245,65 +317,115 @@ def get_dataset(self, key, info):
logger.debug('Constructing coordinate arrays.')
if self.lons is None or self.lats is None:
- self.lons, self.lats = self.get_lonlats()
+ self.lons, self.lats, self.alts = self.get_lonlatalts()
if key.name == 'latitude':
data = self.lats
else:
data = self.lons
- data.attrs = info
+ data.attrs.update(info)
else:
+ calibration = key.calibration or 'gamma'
+ if calibration == 'sigma_nought':
+ calibration = 'sigmaNought'
+ elif calibration == 'beta_nought':
+ calibration = 'betaNought'
+
data = self.read_band()
+ # chunks = data.chunks # This seems to be slower for some reason
+ chunks = CHUNK_SIZE
logger.debug('Reading noise data.')
-
- noise = self.noise.get_noise_correction(data.shape)
+ noise = self.noise.get_noise_correction(data.shape, chunks=chunks).fillna(0)
logger.debug('Reading calibration data.')
- cal = self.calibration.get_calibration('gamma', data.shape)
+ cal = self.calibration.get_calibration(calibration, data.shape, chunks=chunks)
cal_constant = self.calibration.get_calibration_constant()
logger.debug('Calibrating.')
-
+ data = data.where(data > 0)
data = data.astype(np.float64)
- data = (data * data + cal_constant - noise) / cal
+ dn = data * data
+ data = ((dn - noise).clip(min=0) + cal_constant)
- data = xu.sqrt(data.clip(min=0))
-
- data.attrs = info
+ data = (np.sqrt(data) / cal).clip(min=0)
+ data.attrs.update(info)
del noise, cal
- data.attrs['units'] = 'sigma'
+ data.attrs['units'] = calibration
return data
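Per pixel, the reader now computes the calibrated backscatter as sqrt(max(DN**2 - noise, 0) + k) / A, with A the interpolated calibration LUT for the requested quantity and k the calibration constant; a scalar NumPy sketch with synthetic numbers:

```python
import numpy as np

def calibrate(dn, noise, cal, cal_constant):
    """Denoise and calibrate digital numbers, mirroring the steps above."""
    dn = np.where(dn > 0, dn.astype(np.float64), np.nan)  # mask missing samples
    power = dn * dn
    denoised = np.clip(power - noise, 0, None) + cal_constant
    return np.clip(np.sqrt(denoised) / cal, 0, None)

# Synthetic values: a few raw counts, a flat noise floor and calibration LUT value.
dn = np.array([0, 120, 430], dtype=np.uint16)
print(calibrate(dn, noise=900.0, cal=600.0, cal_constant=0.0))
```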
+ def read_band_blocks(self, blocksize=CHUNK_SIZE):
+ """Read the band in native blocks."""
+ # For Sentinel-1 data, the blocks are one line each, and dask seems to choke on that.
+ band = self.filehandle
+
+ shape = band.shape
+ token = tokenize(blocksize, band)
+ name = 'read_band-' + token
+ dskx = dict()
+ if len(band.block_shapes) != 1:
+ raise NotImplementedError('Bands with multiple shapes not supported.')
+ else:
+ chunks = band.block_shapes[0]
+
+ def do_read(the_band, the_window, the_lock):
+ with the_lock:
+ return the_band.read(1, None, window=the_window)
+
+ for ji, window in band.block_windows(1):
+ dskx[(name, ) + ji] = (do_read, band, window, self.read_lock)
+
+ res = da.Array(dskx, name, shape=list(shape),
+ chunks=chunks,
+ dtype=band.dtypes[0])
+ return DataArray(res, dims=('y', 'x'))
+
def read_band(self, blocksize=CHUNK_SIZE):
- """Read the band in blocks."""
+ """Read the band in chunks."""
band = self.filehandle
- shape = band.RasterYSize, band.RasterXSize
- vchunks = range(0, shape[0], blocksize)
- hchunks = range(0, shape[1], blocksize)
+ shape = band.shape
+ if len(band.block_shapes) == 1:
+ total_size = blocksize * blocksize * 1.0
+ lines, cols = band.block_shapes[0]
+ if cols > lines:
+ hblocks = cols
+ vblocks = int(total_size / cols / lines)
+ else:
+ hblocks = int(total_size / cols / lines)
+ vblocks = lines
+ else:
+ hblocks = blocksize
+ vblocks = blocksize
+ vchunks = range(0, shape[0], vblocks)
+ hchunks = range(0, shape[1], hblocks)
- token = tokenize(blocksize, band)
+ token = tokenize(hblocks, vblocks, band)
name = 'read_band-' + token
- dskx = {(name, i, j): (band.GetRasterBand(1).ReadAsArray,
- hcs, vcs,
- min(blocksize, shape[1] - hcs),
- min(blocksize, shape[0] - vcs))
+ def do_read(the_band, the_window, the_lock):
+ with the_lock:
+ return the_band.read(1, None, window=the_window)
+
+ dskx = {(name, i, j): (do_read, band,
+ Window(hcs, vcs,
+ min(hblocks, shape[1] - hcs),
+ min(vblocks, shape[0] - vcs)),
+ self.read_lock)
for i, vcs in enumerate(vchunks)
for j, hcs in enumerate(hchunks)
}
res = da.Array(dskx, name, shape=list(shape),
- chunks=(blocksize, blocksize),
- dtype=np.uint16)
+ chunks=(vblocks, hblocks),
+ dtype=band.dtypes[0])
return DataArray(res, dims=('y', 'x'))
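The chunk-shape arithmetic above aims to keep each dask chunk close to `blocksize ** 2` pixels while following the file's native block layout (Sentinel-1 GRD GeoTIFFs typically store one line per block); a worked example with hypothetical dimensions:

```python
# Hypothetical GRD band whose blocks are single full-width lines.
blocksize = 4096
lines, cols = 1, 25648          # band.block_shapes[0]

total_size = blocksize * blocksize * 1.0
if cols > lines:
    hblocks = cols                             # whole lines horizontally
    vblocks = int(total_size / cols / lines)   # enough lines to reach ~blocksize**2
else:
    hblocks = int(total_size / cols / lines)
    vblocks = lines

print(vblocks, hblocks)  # 654 lines by 25648 columns per chunk
```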
- def get_lonlats(self):
+ def get_lonlatalts(self):
"""Obtain GCPs and construct latitude and longitude arrays.
Args:
@@ -314,17 +436,23 @@ def get_lonlats(self):
"""
band = self.filehandle
- shape = band.RasterYSize, band.RasterXSize
-
- (xpoints, ypoints), (gcp_lons, gcp_lats) = self.get_gcps()
+ (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps()
# FIXME: do interpolation on cartesion coordinates if the area is
# problematic.
- longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, shape)
- latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, shape)
+ longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, band.shape)
+ latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape)
+ altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape)
+
+ longitudes.attrs['gcps'] = gcps
+ longitudes.attrs['crs'] = crs
+ latitudes.attrs['gcps'] = gcps
+ latitudes.attrs['crs'] = crs
+ altitudes.attrs['gcps'] = gcps
+ altitudes.attrs['crs'] = crs
- return longitudes, latitudes
+ return longitudes, latitudes, altitudes
def get_gcps(self):
"""Read GCP from the GDAL band.
@@ -338,18 +466,18 @@ def get_gcps(self):
gcp_coords (tuple): longitude and latitude 1d arrays
"""
- gcps = self.filehandle.GetGCPs()
+ gcps = self.filehandle.gcps
- gcp_array = np.array(
- [(p.GCPLine, p.GCPPixel, p.GCPY, p.GCPX) for p in gcps])
+ gcp_array = np.array([(p.row, p.col, p.x, p.y, p.z) for p in gcps[0]])
ypoints = np.unique(gcp_array[:, 0])
xpoints = np.unique(gcp_array[:, 1])
- gcp_lats = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0])
- gcp_lons = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0])
+ gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0])
+ gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0])
+ gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0])
- return (xpoints, ypoints), (gcp_lons, gcp_lats)
+ return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps
@property
def start_time(self):
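The per-chunk reads go through rasterio windows guarded by a lock, since a single shared dataset handle is not safe to read from several threads at once; a standalone sketch of that pattern (file name hypothetical):

```python
import threading

import rasterio
from rasterio.windows import Window

lock = threading.Lock()

# Hypothetical measurement GeoTIFF; sharing=False keeps the handle private.
src = rasterio.open('measurement.tiff', 'r', sharing=False)

def read_chunk(col_off, row_off, width, height):
    """Read one window of band 1 under the lock, as the dask tasks above do."""
    with lock:
        return src.read(1, window=Window(col_off, row_off, width, height))

block = read_chunk(0, 0, 512, 512)
```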
diff --git a/satpy/tests/reader_tests/__init__.py b/satpy/tests/reader_tests/__init__.py
index 8a2aeba50e..c7f0ab2649 100644
--- a/satpy/tests/reader_tests/__init__.py
+++ b/satpy/tests/reader_tests/__init__.py
@@ -37,7 +37,7 @@
test_scmi, test_ahi_hrit, test_goes_imager_nc,
test_nc_slstr, test_olci_nc,
test_viirs_edr_flood, test_nwcsaf_nc,
- test_seviri_l1b_hrit)
+ test_seviri_l1b_hrit, test_sar_c_safe)
if sys.version_info < (2, 7):
@@ -79,5 +79,6 @@ def suite():
mysuite.addTests(test_nc_slstr.suite())
mysuite.addTests(test_nwcsaf_nc.suite())
mysuite.addTests(test_seviri_l1b_hrit.suite())
+ mysuite.addTests(test_sar_c_safe.suite())
return mysuite
diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py
new file mode 100644
index 0000000000..c644a5ab0d
--- /dev/null
+++ b/satpy/tests/reader_tests/test_sar_c_safe.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2019 Pytroll developers
+
+# Author(s):
+
+# Martin Raspaud
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.sar-c_safe module.
+"""
+import sys
+
+if sys.version_info < (2, 7):
+ import unittest2 as unittest
+else:
+ import unittest
+
+try:
+ import unittest.mock as mock
+except ImportError:
+ import mock
+
+
+class TestSAFEGRD(unittest.TestCase):
+ """Test various nc_slstr file handlers."""
+ @mock.patch('rasterio.open')
+ def test_instantiate(self, mocked_dataset):
+ """Test initialization of file handlers."""
+ from satpy.readers.sar_c_safe import SAFEGRD
+
+ filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0,
+ 'polarization': 'vv'}
+ filetype_info = 'bla'
+ noisefh = mock.MagicMock()
+ calfh = mock.MagicMock()
+
+ test = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/s1a-iw-grd'
+ '-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff',
+ filename_info, filetype_info, calfh, noisefh)
+ assert(test._polarization == 'vv')
+ assert(test.calibration == calfh)
+ assert(test.noise == noisefh)
+ mocked_dataset.assert_called()
+
+
+def suite():
+ """The test suite for test_sar_c_safe."""
+ loader = unittest.TestLoader()
+ mysuite = unittest.TestSuite()
+ mysuite.addTest(loader.loadTestsFromTestCase(TestSAFEGRD))
+ return mysuite
+
+
+if __name__ == '__main__':
+ unittest.main()