Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bump min deps #6559

Merged
merged 13 commits
May 5, 2022
35 changes: 17 additions & 18 deletions ci/requirements/min-all-deps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,46 +10,45 @@ dependencies:
- python=3.8
- boto3=1.13
- bottleneck=1.3
# cartopy 0.18 conflicts with pynio
- cartopy=0.17
- cartopy=0.19
- cdms2=3.1
- cfgrib=0.9
- cftime=1.2
- cftime=1.4
- coveralls
- dask-core=2.30
- distributed=2.30
- h5netcdf=0.8
- h5py=2.10
# hdf5 1.12 conflicts with h5py=2.10
- dask-core=2021.04
- distributed=2021.04
- h5netcdf=0.11
- h5py=3.1
# hdf5 1.12 conflicts with h5py=3.1
- hdf5=1.10
- hypothesis
- iris=2.4
- lxml=4.6 # Optional dep of pydap
- matplotlib-base=3.3
- matplotlib-base=3.4
- nc-time-axis=1.2
# netcdf follows a 1.major.minor[.patch] convention
# (see https://github.com/Unidata/netcdf4-python/issues/1090)
# bumping the netCDF4 version is currently blocked by #4491
- netcdf4=1.5.3
- numba=0.51
- numpy=1.18
- numba=0.53
- numpy=1.19
- packaging=20.0
- pandas=1.1
- pint=0.16
- pandas=1.2
- pint=0.17
- pip
- pseudonetcdf=3.1
- pydap=3.2
- pynio=1.5
# - pynio=1.5.5
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@fulminemizzega If you can figure out a combination that works with this version of the file, we could re-enable.

We'd also be happy to re-enable it in environment.yml as long as it doesn't restrict other packages.

- pytest
- pytest-cov
- pytest-env
- pytest-xdist
- rasterio=1.1
- scipy=1.5
- rasterio=1.2
- scipy=1.6
- seaborn=0.11
- sparse=0.11
- sparse=0.12
- toolz=0.11
- typing_extensions=3.7
- zarr=2.5
- zarr=2.8
- pip:
- numbagg==0.1
4 changes: 2 additions & 2 deletions doc/getting-started-guide/installing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ Required dependencies
---------------------

- Python (3.8 or later)
- `numpy <https://www.numpy.org/>`__ (1.18 or later)
- `numpy <https://www.numpy.org/>`__ (1.19 or later)
- `packaging <https://packaging.pypa.io/en/latest/#>`__ (20.0 or later)
- `pandas <https://pandas.pydata.org/>`__ (1.1 or later)
- `pandas <https://pandas.pydata.org/>`__ (1.2 or later)

.. _optional-dependencies:

Expand Down
20 changes: 20 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,26 @@ New Features
Breaking changes
~~~~~~~~~~~~~~~~

- PyNIO support is now untested. The minimum versions of some dependencies were changed:

=============== ===== ====
Package Old New
=============== ===== ====
cftime 1.2 1.4
dask 2.30 2021.4
distributed 2.30 2021.4
h5netcdf 0.8 0.11
matplotlib-base 3.3 3.4
numba 0.51 0.53
numpy 1.18 1.19
pandas 1.1 1.2
pint 0.16 0.17
rasterio 1.1 1.2
scipy 1.5 1.6
sparse 0.11 0.12
zarr 2.5 2.8
=============== ===== ====

- The Dataset and DataArray ``rename*`` methods do not implicitly add or drop
indexes. (:pull:`5692`).
By `Benoît Bovy <https://github.com/benbovy>`_.
Expand Down
4 changes: 2 additions & 2 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,8 @@ zip_safe = False # https://mypy.readthedocs.io/en/latest/installed_packages.htm
include_package_data = True
python_requires = >=3.8
install_requires =
numpy >= 1.18
pandas >= 1.1
numpy >= 1.19
pandas >= 1.2
packaging >= 20.0

[options.extras_require]
Expand Down
127 changes: 2 additions & 125 deletions xarray/core/dask_array_compat.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
import warnings

import numpy as np
from packaging.version import Version

from .pycompat import dask_version

try:
import dask.array as da
Expand Down Expand Up @@ -57,127 +54,7 @@ def pad(array, pad_width, mode="constant", **kwargs):
return padded


if dask_version > Version("2.30.0"):
ensure_minimum_chunksize = da.overlap.ensure_minimum_chunksize
else:

# copied from dask
def ensure_minimum_chunksize(size, chunks):
    """Determine new chunks to ensure that every chunk >= size

    Parameters
    ----------
    size : int
        The minimum size of any chunk.
    chunks : tuple
        Chunks along one axis, e.g. ``(3, 3, 2)``

    Returns
    -------
    tuple
        New chunks in which every entry is at least ``size``; chunks that
        were too small are merged into a neighbouring chunk.

    Raises
    ------
    ValueError
        If the sum of ``chunks`` is smaller than ``size``, so no valid
        rechunking exists.

    Examples
    --------
    >>> ensure_minimum_chunksize(10, (20, 20, 1))
    (20, 11, 10)
    >>> ensure_minimum_chunksize(3, (1, 1, 3))
    (5,)

    See Also
    --------
    overlap
    """
    # Fast path: every chunk already meets the minimum, return unchanged.
    if size <= min(chunks):
        return chunks

    # add too-small chunks to chunks before them
    output = []
    new = 0  # running size of the chunk currently being accumulated
    for c in chunks:
        if c < size:
            # If the accumulator is already big enough to donate (size - c)
            # elements and still stay >= size, split it there; otherwise
            # keep absorbing the small chunk into the accumulator.
            if new > size + (size - c):
                output.append(new - (size - c))
                new = size
            else:
                new += c
        if new >= size:
            output.append(new)
            new = 0
        if c >= size:
            new += c
    # Flush the trailing accumulator: emit it if large enough, otherwise
    # merge it into the last emitted chunk; if nothing was emitted at all,
    # the whole axis is smaller than the requested minimum.
    if new >= size:
        output.append(new)
    elif len(output) >= 1:
        output[-1] += new
    else:
        raise ValueError(
            f"The overlapping depth {size} is larger than your "
            f"array {sum(chunks)}."
        )

    return tuple(output)


if dask_version > Version("2021.03.0"):
if da is not None:
sliding_window_view = da.lib.stride_tricks.sliding_window_view
else:

def sliding_window_view(x, window_shape, axis=None):
    """Fallback implementation of ``sliding_window_view`` for dask arrays.

    Builds a sliding-window view over ``x`` by applying the numpy-compat
    ``sliding_window_view`` chunk-wise with dask's ``map_overlap``, with
    one-sided overlaps sized to each window.  The window dimensions are
    appended as new trailing axes (see ``new_axis`` below).

    Parameters
    ----------
    x : dask array (presumably — it must support ``.ndim``, ``.chunks``,
        ``.rechunk`` and ``._meta``; TODO confirm against callers)
    window_shape : int or iterable of int
        Size of the window over each axis in ``axis``; must be positive.
    axis : int or iterable of int, optional
        Axes to window over.  ``None`` means all axes, in which case
        ``window_shape`` must supply one entry per dimension of ``x``.

    Raises
    ------
    ValueError
        If ``window_shape`` contains non-positive values or its length
        does not match the number of axes.
    """
    from dask.array.overlap import map_overlap
    from numpy.core.numeric import normalize_axis_tuple

    from .npcompat import sliding_window_view as _np_sliding_window_view

    # Normalize window_shape to a tuple (a bare int means a single window).
    window_shape = (
        tuple(window_shape) if np.iterable(window_shape) else (window_shape,)
    )

    window_shape_array = np.array(window_shape)
    if np.any(window_shape_array <= 0):
        raise ValueError("`window_shape` must contain positive values")

    if axis is None:
        axis = tuple(range(x.ndim))
        if len(window_shape) != len(axis):
            raise ValueError(
                f"Since axis is `None`, must provide "
                f"window_shape for all dimensions of `x`; "
                f"got {len(window_shape)} window_shape elements "
                f"and `x.ndim` is {x.ndim}."
            )
    else:
        # allow_duplicate=True: the same axis may be windowed more than once.
        axis = normalize_axis_tuple(axis, x.ndim, allow_duplicate=True)
        if len(window_shape) != len(axis):
            raise ValueError(
                f"Must provide matching length window_shape and "
                f"axis; got {len(window_shape)} window_shape "
                f"elements and {len(axis)} axes elements."
            )

    # Overlap depth per axis: a window of length w needs w - 1 extra
    # elements from the following chunk.
    depths = [0] * x.ndim
    for ax, window in zip(axis, window_shape):
        depths[ax] += window - 1

    # Ensure that each chunk is big enough to leave at least a size-1 chunk
    # after windowing (this is only really necessary for the last chunk).
    safe_chunks = tuple(
        ensure_minimum_chunksize(d + 1, c) for d, c in zip(depths, x.chunks)
    )
    x = x.rechunk(safe_chunks)

    # result.shape = x_shape_trimmed + window_shape,
    # where x_shape_trimmed is x.shape with every entry
    # reduced by one less than the corresponding window size.
    # trim chunks to match x_shape_trimmed
    newchunks = tuple(
        c[:-1] + (c[-1] - d,) for d, c in zip(depths, x.chunks)
    ) + tuple((window,) for window in window_shape)

    kwargs = dict(
        depth=tuple((0, d) for d in depths),  # Overlap on +ve side only
        boundary="none",
        meta=x._meta,
        new_axis=range(x.ndim, x.ndim + len(axis)),  # windows become trailing axes
        chunks=newchunks,
        trim=False,
        window_shape=window_shape,
        axis=axis,
    )

    return map_overlap(_np_sliding_window_view, x, align_arrays=False, **kwargs)
sliding_window_view = None