Change default CF writer engine to follow xarray defaults #844

Merged · 3 commits · Jul 4, 2019
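
For context, this is the writer call whose default changed. A minimal sketch mirroring this PR's test setup (the output filename is hypothetical): with engine left unset, the CF writer now defers to xarray's to_netcdf backend selection instead of forcing h5netcdf.

    from datetime import datetime
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs={'start_time': datetime(2018, 5, 30, 10, 0),
                                            'end_time': datetime(2018, 5, 30, 10, 15)})

    # engine now defaults to None: xarray picks the backend
    # (netcdf4 preferred when installed).
    scn.save_datasets(filename='out.nc', writer='cf')

    # An explicit engine can still be passed through to to_netcdf:
    scn.save_datasets(filename='out.nc', writer='cf', engine='h5netcdf')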
doc/source/conf.py (2 changes: 1 addition & 1 deletion)
@@ -251,6 +251,6 @@ def __getattr__(cls, name):
     'trollsift': ('https://trollsift.readthedocs.io/en/stable', None),
     'trollimage': ('https://trollimage.readthedocs.io/en/stable', None),
     'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None),
-    'geoviews': ('http://geo.holoviews.org', None),
+    'geoviews': ('http://geoviews.org', None),
     'pyproj': ('https://pyproj4.github.io/pyproj/dev', None)
 }
satpy/readers/modis_l2.py (2 changes: 1 addition & 1 deletion)
@@ -35,7 +35,7 @@
 Geolocation files
 -----------------
 
-Similar to the ``modis_l1b` reader the geolocation files (mod03) for the 1km data are optional and if not
+Similar to the ``modis_l1b`` reader the geolocation files (mod03) for the 1km data are optional and if not
 given 1km geolocations will be interpolated from the 5km geolocation contained within the file.
 
 For the 500m and 250m data geolocation files are needed.
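A usage sketch of the geolocation behavior described in that docstring; the granule file names are hypothetical and the loaded dataset name is an assumption:

    from satpy import Scene

    # 1km data: the MOD03 geolocation file is optional; without it, 1km
    # geolocation is interpolated from the 5km geolocation in the L2 file.
    scn = Scene(reader='modis_l2',
                filenames=['MOD35_L2.A2019001.0000.061.hdf'])
    scn.load(['cloud_mask'])

    # 500m and 250m data: the separate geolocation file must be provided.
    scn = Scene(reader='modis_l2',
                filenames=['MOD35_L2.A2019001.0000.061.hdf',
                           'MOD03.A2019001.0000.061.hdf'])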
satpy/tests/writer_tests/test_cf.py (28 changes: 10 additions & 18 deletions)
@@ -83,13 +83,12 @@ def test_save_array(self):
                                          prerequisites=[DatasetID('hej')]))
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename) as f:
                 self.assertTrue(np.all(f['test-array'][:] == [1, 2, 3]))
                 expected_prereq = ("DatasetID(name='hej', wavelength=None, "
                                    "resolution=None, polarization=None, "
                                    "calibration=None, level=None, modifiers=())")
-                self.assertEqual(f['test-array'].attrs['prerequisites'][0],
+                self.assertEqual(f['test-array'].attrs['prerequisites'],
                                  expected_prereq)
 
     def test_save_with_compression(self):
@@ -136,8 +135,7 @@ def test_save_array_coords(self):
                                          prerequisites=[DatasetID('hej')]))
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename) as f:
                 self.assertTrue(np.all(f['test-array'][:] == [1, 2, 3]))
                 self.assertTrue(np.all(f['x'][:] == [0, 1, 2]))
                 self.assertTrue(np.all(f['y'][:] == [0]))
@@ -147,7 +145,7 @@ def test_save_array_coords(self):
                 expected_prereq = ("DatasetID(name='hej', wavelength=None, "
                                    "resolution=None, polarization=None, "
                                    "calibration=None, level=None, modifiers=())")
-                self.assertEqual(f['test-array'].attrs['prerequisites'][0],
+                self.assertEqual(f['test-array'].attrs['prerequisites'],
                                  expected_prereq)
 
     def test_groups(self):
@@ -222,8 +220,7 @@ def test_single_time_value(self):
                                               end_time=end_time))
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename, decode_cf=False) as f:
                 self.assertTrue(np.all(f['time_bnds'][:] == np.array([-300., 600.])))
 
     def test_bounds(self):
@@ -241,8 +238,7 @@ def test_bounds(self):
                                               end_time=end_time))
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename, decode_cf=False) as f:
                 self.assertTrue(np.all(f['time_bnds'][:] == np.array([-300., 600.])))
 
     def test_bounds_minimum(self):
@@ -268,8 +264,7 @@ def test_bounds_minimum(self):
                                               end_time=end_timeB))
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename, decode_cf=False) as f:
                 self.assertTrue(np.all(f['time_bnds'][:] == np.array([-300., 600.])))
 
     def test_bounds_missing_time_info(self):
@@ -291,8 +286,7 @@ def test_bounds_missing_time_info(self):
                              coords={'time': [np.datetime64('2018-05-30T10:05:00')]})
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename, decode_cf=False) as f:
                 self.assertTrue(np.all(f['time_bnds'][:] == np.array([-300., 600.])))
 
     def test_encoding_kwarg(self):
@@ -311,8 +305,7 @@ def test_encoding_kwarg(self):
                                        'add_offset': 0.0,
                                        '_FillValue': 3}}
             scn.save_datasets(filename=filename, encoding=encoding, writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename, mask_and_scale=False) as f:
                 self.assertTrue(np.all(f['test-array'][:] == [10, 20, 30]))
                 self.assertTrue(f['test-array'].attrs['scale_factor'] == 0.1)
                 self.assertTrue(f['test-array'].attrs['_FillValue'] == 3)
@@ -335,8 +328,7 @@ def test_header_attrs(self):
             scn.save_datasets(filename=filename,
                               header_attrs=header_attrs,
                               writer='cf')
-            import h5netcdf as nc4
-            with nc4.File(filename) as f:
+            with xr.open_dataset(filename) as f:
                 self.assertTrue(f.attrs['sensor'] == 'SEVIRI')
                 self.assertTrue('sensor' in f.attrs.keys())
                 self.assertTrue('orbit' not in f.attrs.keys())
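The tests above switch from reading the files back with h5netcdf to xr.open_dataset, and the flags matter: decode_cf=False keeps the raw on-disk values (so time_bnds stays as second offsets like [-300., 600.] rather than decoded datetimes), while mask_and_scale=False keeps the packed integers and leaves scale_factor, add_offset and _FillValue visible as attributes. An illustrative sketch against a hypothetical 'out.nc':

    import xarray as xr

    # Raw values: time_bnds is not decoded into datetime64 bounds.
    with xr.open_dataset('out.nc', decode_cf=False) as f:
        print(f['time_bnds'].values)                   # e.g. [[-300., 600.]]

    # Packed values: scaling is not applied, packing attrs stay visible.
    with xr.open_dataset('out.nc', mask_and_scale=False) as f:
        print(f['test-array'].values)                  # e.g. [10, 20, 30]
        print(f['test-array'].attrs['scale_factor'])   # e.g. 0.1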
satpy/writers/cf_writer.py (8 changes: 5 additions & 3 deletions)
@@ -123,7 +123,7 @@
               np.dtype('int64'), np.dtype('uint64'),
               np.dtype('float32'), np.dtype('float64'),
               np.string_]
-"""Numpy datatypes compatible with all netCDF4 backends. np.unicode_ is excluded because h5py (and thus h5netcdf)
+"""Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is excluded because h5py (and thus h5netcdf)
 has problems with unicode, see https://github.com/h5py/h5py/issues/624."""
 
@@ -570,7 +570,7 @@ def update_encoding(self, datasets, to_netcdf_kwargs):
 
         return encoding, other_to_netcdf_kwargs
 
-    def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine='h5netcdf', epoch=EPOCH,
+    def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH,
                       flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False,
                       compression=None, **to_netcdf_kwargs):
         """Save the given datasets in one netCDF file.
@@ -589,7 +589,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
         header_attrs:
             Global attributes to be included
         engine (str):
-            Module to be used for writing netCDF files
+            Module to be used for writing netCDF files. Follows xarray's
+            :meth:`~xarray.Dataset.to_netcdf` engine choices with a
+            preference for 'netcdf4'.
         epoch (str):
             Reference time for encoding of time coordinates
         flatten_attrs (bool):
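The updated docstring documents a preference for 'netcdf4' when engine is left unset. A sketch of what such a fallback can look like (an assumption for illustration; the selection code itself is outside this diff):

    def _get_default_engine():
        # Prefer the netCDF4 backend when it is importable; otherwise
        # return None and let xarray pick from its installed backends.
        try:
            import netCDF4  # noqa: F401
            return 'netcdf4'
        except ImportError:
            return None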