
Commit

Merge remote-tracking branch 'upstream/main' into feature/fix-interp-docs

Fixes merge with pydata#6637
Louis Stenger committed May 31, 2022
2 parents 2030c80 + 4615074 · commit 0df596e
Showing 14 changed files with 1,213 additions and 951 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -1,5 +1,7 @@
 *.py[cod]
 __pycache__
+.env
+.venv
 
 # example caches from Hypothesis
 .hypothesis/
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -46,7 +46,7 @@ repos:
 #  - id: velin
 #    args: ["--write", "--compact"]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.950
+    rev: v0.960
     hooks:
       - id: mypy
         # Copied from setup.cfg
2 changes: 1 addition & 1 deletion HOW_TO_RELEASE.md
@@ -111,4 +111,4 @@ upstream https://github.com/pydata/xarray (push)

 As of 2022.03.0, we utilize the [CALVER](https://calver.org/) version system.
 Specifically, we have adopted the pattern `YYYY.MM.X`, where `YYYY` is a 4-digit
-year (e.g. `2022`), `MM` is a 2-digit zero-padded month (e.g. `01` for January), and `X` is the release number (starting at zero at the start of each month and incremented once for each additional release).
+year (e.g. `2022`), `0M` is a 2-digit zero-padded month (e.g. `01` for January), and `X` is the release number (starting at zero at the start of each month and incremented once for each additional release).
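
Under this scheme, the first release cut in May 2022 is tagged `2022.05.0` and a second release that month `2022.05.1`. A minimal sketch of the pattern (the `calver_tag` helper is invented for illustration and is not part of the repository):

    from datetime import date

    def calver_tag(today: date, release_index: int) -> str:
        # YYYY: 4-digit year; 0M: zero-padded month; X: per-month release counter
        return f"{today.year:04d}.{today.month:02d}.{release_index}"

    print(calver_tag(date(2022, 5, 31), 0))  # 2022.05.0
    print(calver_tag(date(2022, 5, 31), 1))  # 2022.05.1
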
13 changes: 9 additions & 4 deletions xarray/backends/api.py
@@ -56,6 +56,8 @@
     T_NetcdfEngine,
     Literal["pydap", "pynio", "pseudonetcdf", "cfgrib", "zarr"],
     Type[BackendEntrypoint],
+    str,  # no nice typing support for custom backends
+    None,
 ]
 T_Chunks = Union[int, dict[Any, Any], Literal["auto"], None]
 T_NetcdfTypes = Literal[
@@ -392,7 +394,8 @@ def open_dataset(
         scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like
         objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF).
     engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", \
-        "pseudonetcdf", "zarr"} or subclass of xarray.backends.BackendEntrypoint, optional
+        "pseudonetcdf", "zarr", None}, installed backend \
+        or subclass of xarray.backends.BackendEntrypoint, optional
         Engine to use when reading files. If not provided, the default engine
         is chosen based on available dependencies, with a preference for
         "netcdf4". A custom backend class (a subclass of ``BackendEntrypoint``)
@@ -579,7 +582,8 @@ def open_dataarray(
         scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like
         objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF).
     engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", \
-        "pseudonetcdf", "zarr"}, optional
+        "pseudonetcdf", "zarr", None}, installed backend \
+        or subclass of xarray.backends.BackendEntrypoint, optional
         Engine to use when reading files. If not provided, the default engine
         is chosen based on available dependencies, with a preference for
         "netcdf4".
@@ -804,8 +808,9 @@ def open_mfdataset(
         If provided, call this function on each dataset prior to concatenation.
         You can find the file-name from which each dataset was loaded in
         ``ds.encoding["source"]``.
-    engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", "zarr"}, \
-        optional
+    engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", \
+        "pseudonetcdf", "zarr", None}, installed backend \
+        or subclass of xarray.backends.BackendEntrypoint, optional
         Engine to use when reading files. If not provided, the default engine
         is chosen based on available dependencies, with a preference for
         "netcdf4".
28 changes: 17 additions & 11 deletions xarray/core/alignment.py
@@ -16,6 +16,7 @@
     Tuple,
     Type,
     TypeVar,
+    cast,
 )
 
 import numpy as np
@@ -30,7 +31,7 @@
 if TYPE_CHECKING:
     from .dataarray import DataArray
     from .dataset import Dataset
-    from .types import JoinOptions
+    from .types import JoinOptions, T_DataArray, T_DataArrayOrSet, T_Dataset
 
 DataAlignable = TypeVar("DataAlignable", bound=DataWithCoords)

@@ -559,7 +560,7 @@ def align(self) -> None:
 def align(
     *objects: DataAlignable,
     join: JoinOptions = "inner",
-    copy=True,
+    copy: bool = True,
     indexes=None,
     exclude=frozenset(),
     fill_value=dtypes.NA,
@@ -592,7 +593,7 @@ def align(
         those of the first object with that dimension. Indexes for the same
         dimension must have the same size in all objects.
-    copy : bool, optional
+    copy : bool, default: True
         If ``copy=True``, data in the return values is always copied. If
         ``copy=False`` and reindexing is unnecessary, or can be performed with
         only slice operations, then the output may share memory with the input.
@@ -609,7 +610,7 @@ def align(
     Returns
     -------
-    aligned : DataArray or Dataset
+    aligned : tuple of DataArray or Dataset
         Tuple of objects with the same type as `*objects` with aligned
         coordinates.
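
A short illustration of the corrected return annotation (standard xarray usage, not code from this commit): `align` always returns a tuple, even for two inputs:

    import xarray as xr

    a = xr.DataArray([1, 2, 3], dims="x", coords={"x": [0, 1, 2]})
    b = xr.DataArray([10, 20, 30, 40], dims="x", coords={"x": [1, 2, 3, 4]})

    a2, b2 = xr.align(a, b, join="inner")  # a tuple of aligned objects
    print(list(a2.x.values))  # [1, 2] -- only shared coordinate labels remain
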
@@ -935,7 +936,9 @@ def _get_broadcast_dims_map_common_coords(args, exclude):
     return dims_map, common_coords
 
 
-def _broadcast_helper(arg, exclude, dims_map, common_coords):
+def _broadcast_helper(
+    arg: T_DataArrayOrSet, exclude, dims_map, common_coords
+) -> T_DataArrayOrSet:
 
     from .dataarray import DataArray
     from .dataset import Dataset
@@ -950,22 +953,25 @@ def _set_dims(var):

         return var.set_dims(var_dims_map)
 
-    def _broadcast_array(array):
+    def _broadcast_array(array: T_DataArray) -> T_DataArray:
         data = _set_dims(array.variable)
         coords = dict(array.coords)
         coords.update(common_coords)
-        return DataArray(data, coords, data.dims, name=array.name, attrs=array.attrs)
+        return array.__class__(
+            data, coords, data.dims, name=array.name, attrs=array.attrs
+        )
 
-    def _broadcast_dataset(ds):
+    def _broadcast_dataset(ds: T_Dataset) -> T_Dataset:
         data_vars = {k: _set_dims(ds.variables[k]) for k in ds.data_vars}
         coords = dict(ds.coords)
         coords.update(common_coords)
-        return Dataset(data_vars, coords, ds.attrs)
+        return ds.__class__(data_vars, coords, ds.attrs)
 
+    # remove casts once https://github.com/python/mypy/issues/12800 is resolved
     if isinstance(arg, DataArray):
-        return _broadcast_array(arg)
+        return cast("T_DataArrayOrSet", _broadcast_array(arg))
     elif isinstance(arg, Dataset):
-        return _broadcast_dataset(arg)
+        return cast("T_DataArrayOrSet", _broadcast_dataset(arg))
     else:
         raise ValueError("all input must be Dataset or DataArray objects")
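
`_broadcast_helper` backs xarray's public `broadcast` function; the typed version above (with `cast` working around mypy issue 12800) does not change its behaviour, sketched here with standard xarray usage (not code from this commit):

    import numpy as np
    import xarray as xr

    a = xr.DataArray(np.arange(3), dims="x")
    b = xr.DataArray(np.arange(4), dims="y")

    a2, b2 = xr.broadcast(a, b)  # each result gains the union of dimensions
    print(a2.dims, a2.shape)  # ('x', 'y') (3, 4)
    print(b2.dims, b2.shape)  # ('x', 'y') (3, 4)
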

[Diffs for the remaining 9 changed files were not loaded in this view.]
