diff --git a/ci/requirements/min-all-deps.yml b/ci/requirements/min-all-deps.yml
index 76e2b28093d..ecabde06622 100644
--- a/ci/requirements/min-all-deps.yml
+++ b/ci/requirements/min-all-deps.yml
@@ -10,46 +10,45 @@ dependencies:
   - python=3.8
   - boto3=1.13
   - bottleneck=1.3
-  # cartopy 0.18 conflicts with pynio
-  - cartopy=0.17
+  - cartopy=0.19
   - cdms2=3.1
   - cfgrib=0.9
-  - cftime=1.2
+  - cftime=1.4
   - coveralls
-  - dask-core=2.30
-  - distributed=2.30
-  - h5netcdf=0.8
-  - h5py=2.10
-  # hdf5 1.12 conflicts with h5py=2.10
+  - dask-core=2021.04
+  - distributed=2021.04
+  - h5netcdf=0.11
+  - h5py=3.1
+  # hdf5 1.12 conflicts with h5py=3.1
   - hdf5=1.10
   - hypothesis
   - iris=2.4
   - lxml=4.6  # Optional dep of pydap
-  - matplotlib-base=3.3
+  - matplotlib-base=3.4
   - nc-time-axis=1.2
   # netcdf follows a 1.major.minor[.patch] convention
   # (see https://github.com/Unidata/netcdf4-python/issues/1090)
   # bumping the netCDF4 version is currently blocked by #4491
   - netcdf4=1.5.3
-  - numba=0.51
-  - numpy=1.18
+  - numba=0.53
+  - numpy=1.19
   - packaging=20.0
-  - pandas=1.1
-  - pint=0.16
+  - pandas=1.2
+  - pint=0.17
   - pip
   - pseudonetcdf=3.1
   - pydap=3.2
-  - pynio=1.5
+  # - pynio=1.5.5
   - pytest
   - pytest-cov
   - pytest-env
   - pytest-xdist
-  - rasterio=1.1
-  - scipy=1.5
+  - rasterio=1.2
+  - scipy=1.6
   - seaborn=0.11
-  - sparse=0.11
+  - sparse=0.12
   - toolz=0.11
   - typing_extensions=3.7
-  - zarr=2.5
+  - zarr=2.8
   - pip:
     - numbagg==0.1
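
An aside, not part of the patch: a quick way to sanity-check a local environment against the bumped pins above is to compare installed versions with packaging. A minimal sketch (the MINIMUMS mapping is an illustrative subset of the pins, not the full list):

    from importlib.metadata import PackageNotFoundError, version

    from packaging.version import Version

    # Illustrative subset of the new minimums from min-all-deps.yml.
    MINIMUMS = {"numpy": "1.19", "pandas": "1.2", "scipy": "1.6", "zarr": "2.8"}

    for name, minimum in MINIMUMS.items():
        try:
            installed = Version(version(name))
        except PackageNotFoundError:
            print(f"{name}: not installed")
            continue
        status = "ok" if installed >= Version(minimum) else f"needs >= {minimum}"
        print(f"{name} {installed}: {status}")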
diff --git a/doc/getting-started-guide/installing.rst b/doc/getting-started-guide/installing.rst
index 0668853946f..faa0fba5dd3 100644
--- a/doc/getting-started-guide/installing.rst
+++ b/doc/getting-started-guide/installing.rst
@@ -7,9 +7,9 @@ Required dependencies
 ---------------------

 - Python (3.8 or later)
-- `numpy <https://numpy.org/>`__ (1.18 or later)
+- `numpy <https://numpy.org/>`__ (1.19 or later)
 - `packaging <https://packaging.pypa.io/en/latest/>`__ (20.0 or later)
-- `pandas <https://pandas.pydata.org/>`__ (1.1 or later)
+- `pandas <https://pandas.pydata.org/>`__ (1.2 or later)

 .. _optional-dependencies:

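Also outside the patch: xarray ships a reporting helper that prints the detected versions of required and optional dependencies, which is handy for checking an environment against the minimums listed here:

    import xarray as xr

    # Prints Python/OS details plus the versions of xarray's required
    # and optional dependencies found in the current environment.
    xr.show_versions()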
diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 4882402073c..0d8ab5a8b40 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -45,6 +45,26 @@ New Features
 Breaking changes
 ~~~~~~~~~~~~~~~~

+- PyNIO support is now untested. The minimum versions of some dependencies were changed:
+
+  =============== ===== ======
+  Package         Old   New
+  =============== ===== ======
+  cftime          1.2   1.4
+  dask            2.30  2021.4
+  distributed     2.30  2021.4
+  h5netcdf        0.8   0.11
+  matplotlib-base 3.3   3.4
+  numba           0.51  0.53
+  numpy           1.18  1.19
+  pandas          1.1   1.2
+  pint            0.16  0.17
+  rasterio        1.1   1.2
+  scipy           1.5   1.6
+  sparse          0.11  0.12
+  zarr            2.5   2.8
+  =============== ===== ======
+
 - The Dataset and DataArray ``rename*`` methods do not implicitly add or drop
   indexes. (:pull:`5692`).
   By `Benoît Bovy <https://github.com/benbovy>`_.
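
Outside the patch, a sketch of the ``rename*`` entry above, following the behavior the changelog describes (the dataset contents and the follow-up ``set_index`` call are invented for illustration):

    import xarray as xr

    ds = xr.Dataset({"v": ("x", [1, 2, 3])}, coords={"a": ("x", [10, 20, 30])})

    # Renaming coordinate "a" to the dimension name "x" no longer
    # creates an index for "x" implicitly; request one explicitly.
    renamed = ds.rename(a="x")
    indexed = renamed.set_index(x="x")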
diff --git a/setup.cfg b/setup.cfg
index 05b202810b4..6a0a06d2367 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -75,8 +75,8 @@ zip_safe = False # https://mypy.readthedocs.io/en/latest/installed_packages.htm
 include_package_data = True
 python_requires = >=3.8
 install_requires =
-    numpy >= 1.18
-    pandas >= 1.1
+    numpy >= 1.19
+    pandas >= 1.2
     packaging >= 20.0

 [options.extras_require]
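
An aside, not part of the patch: the dask bump crosses dask's switch to calendar versioning (late 2020), and packaging orders the two schemes correctly, which is what makes dropping the version guards in dask_array_compat.py below safe:

    from packaging.version import Version

    # Calendar-versioned dask releases compare as newer than both old
    # guard thresholds, so the guarded fallbacks became unreachable.
    assert Version("2021.04.0") > Version("2.30.0")
    assert Version("2021.04.0") > Version("2021.03.0")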
diff --git a/xarray/core/dask_array_compat.py b/xarray/core/dask_array_compat.py
index 0e0229cc3ca..4d73867a283 100644
--- a/xarray/core/dask_array_compat.py
+++ b/xarray/core/dask_array_compat.py
@@ -1,9 +1,6 @@
 import warnings

 import numpy as np
-from packaging.version import Version
-
-from .pycompat import dask_version

 try:
     import dask.array as da
@@ -57,127 +54,7 @@ def pad(array, pad_width, mode="constant", **kwargs):
         return padded


-if dask_version > Version("2.30.0"):
-    ensure_minimum_chunksize = da.overlap.ensure_minimum_chunksize
-else:
-
-    # copied from dask
-    def ensure_minimum_chunksize(size, chunks):
-        """Determine new chunks to ensure that every chunk >= size
-
-        Parameters
-        ----------
-        size : int
-            The maximum size of any chunk.
-        chunks : tuple
-            Chunks along one axis, e.g. ``(3, 3, 2)``
-
-        Examples
-        --------
-        >>> ensure_minimum_chunksize(10, (20, 20, 1))
-        (20, 11, 10)
-        >>> ensure_minimum_chunksize(3, (1, 1, 3))
-        (5,)
-
-        See Also
-        --------
-        overlap
-        """
-        if size <= min(chunks):
-            return chunks
-
-        # add too-small chunks to chunks before them
-        output = []
-        new = 0
-        for c in chunks:
-            if c < size:
-                if new > size + (size - c):
-                    output.append(new - (size - c))
-                    new = size
-                else:
-                    new += c
-            if new >= size:
-                output.append(new)
-                new = 0
-            if c >= size:
-                new += c
-        if new >= size:
-            output.append(new)
-        elif len(output) >= 1:
-            output[-1] += new
-        else:
-            raise ValueError(
-                f"The overlapping depth {size} is larger than your "
-                f"array {sum(chunks)}."
-            )
-
-        return tuple(output)
-
-
-if dask_version > Version("2021.03.0"):
+if da is not None:
     sliding_window_view = da.lib.stride_tricks.sliding_window_view
 else:
-
-    def sliding_window_view(x, window_shape, axis=None):
-        from dask.array.overlap import map_overlap
-        from numpy.core.numeric import normalize_axis_tuple
-
-        from .npcompat import sliding_window_view as _np_sliding_window_view
-
-        window_shape = (
-            tuple(window_shape) if np.iterable(window_shape) else (window_shape,)
-        )
-
-        window_shape_array = np.array(window_shape)
-        if np.any(window_shape_array <= 0):
-            raise ValueError("`window_shape` must contain positive values")
-
-        if axis is None:
-            axis = tuple(range(x.ndim))
-            if len(window_shape) != len(axis):
-                raise ValueError(
-                    f"Since axis is `None`, must provide "
-                    f"window_shape for all dimensions of `x`; "
-                    f"got {len(window_shape)} window_shape elements "
-                    f"and `x.ndim` is {x.ndim}."
-                )
-        else:
-            axis = normalize_axis_tuple(axis, x.ndim, allow_duplicate=True)
-            if len(window_shape) != len(axis):
-                raise ValueError(
-                    f"Must provide matching length window_shape and "
-                    f"axis; got {len(window_shape)} window_shape "
-                    f"elements and {len(axis)} axes elements."
-                )
-
-        depths = [0] * x.ndim
-        for ax, window in zip(axis, window_shape):
-            depths[ax] += window - 1
-
-        # Ensure that each chunk is big enough to leave at least a size-1 chunk
-        # after windowing (this is only really necessary for the last chunk).
-        safe_chunks = tuple(
-            ensure_minimum_chunksize(d + 1, c) for d, c in zip(depths, x.chunks)
-        )
-        x = x.rechunk(safe_chunks)
-
-        # result.shape = x_shape_trimmed + window_shape,
-        # where x_shape_trimmed is x.shape with every entry
-        # reduced by one less than the corresponding window size.
-        # trim chunks to match x_shape_trimmed
-        newchunks = tuple(
-            c[:-1] + (c[-1] - d,) for d, c in zip(depths, x.chunks)
-        ) + tuple((window,) for window in window_shape)
-
-        kwargs = dict(
-            depth=tuple((0, d) for d in depths),  # Overlap on +ve side only
-            boundary="none",
-            meta=x._meta,
-            new_axis=range(x.ndim, x.ndim + len(axis)),
-            chunks=newchunks,
-            trim=False,
-            window_shape=window_shape,
-            axis=axis,
-        )
-
-        return map_overlap(_np_sliding_window_view, x, align_arrays=False, **kwargs)
+    sliding_window_view = None
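
Outside the patch: with the fallback removed, callers go straight to dask's own implementation whenever dask is installed. A minimal usage sketch (array and window size invented for illustration):

    import dask.array as da
    import numpy as np

    x = da.from_array(np.arange(10), chunks=4)

    # Length-3 rolling windows along axis 0; a trailing window
    # dimension is appended: shape (10 - 3 + 1, 3) == (8, 3).
    windows = da.lib.stride_tricks.sliding_window_view(x, 3, axis=0)
    print(windows.shape)  # (8, 3)
    print(windows.compute()[0])  # [0 1 2]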