Fix Dataset/DataArray.isel with drop=True and scalar DataArray indexes #6579

Merged
merged 10 commits on May 10, 2022
3 changes: 3 additions & 0 deletions doc/whats-new.rst
@@ -119,6 +119,9 @@ Bug fixes
:pull:`6489`). By `Spencer Clark <https://github.com/spencerkclark>`_.
- Dark themes are now properly detected in Furo-themed Sphinx documents (:issue:`6500`, :pull:`6501`).
By `Kevin Paul <https://github.com/kmpaul>`_.
- :py:meth:`isel` with ``drop=True`` works as intended with scalar :py:class:`DataArray` indexers
  (:issue:`6554`, :pull:`6579`).
  By `Michael Niklas <https://github.com/headtr1ck>`_.

Documentation
~~~~~~~~~~~~~
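A minimal sketch of the fixed behavior (illustrative only, not part of the diff; the dataset mirrors the one used in the new test below):

import xarray as xr

ds = xr.Dataset({"a": ("x", [1, 2, 3])}, coords={"b": ("x", [5, 6, 7])})

# Before this fix, a 0-d DataArray indexer took the fancy-indexing path,
# where drop=True was not honoured and the scalar coordinate "b" was kept.
result = ds.isel(x=xr.DataArray(1), drop=True)
assert "b" not in result.coords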
10 changes: 5 additions & 5 deletions xarray/core/dataarray.py
@@ -77,7 +77,7 @@
except ImportError:
iris_Cube = None

from .types import T_DataArray, T_Xarray
from .types import ErrorChoice, ErrorChoiceWithWarn, T_DataArray, T_Xarray


def _infer_coords_and_dims(
@@ -1170,7 +1170,7 @@ def isel(
self,
indexers: Mapping[Any, Any] = None,
drop: bool = False,
missing_dims: str = "raise",
missing_dims: ErrorChoiceWithWarn = "raise",
**indexers_kwargs: Any,
) -> DataArray:
"""Return a new DataArray whose data is given by integer indexing
@@ -1185,7 +1185,7 @@ def isel(
If DataArrays are passed as indexers, xarray-style indexing will be
carried out. See :ref:`indexing` for the details.
One of indexers or indexers_kwargs must be provided.
drop : bool, optional
drop : bool, default: False
If ``drop=True``, drop coordinates variables indexed by integers
instead of making them scalar.
missing_dims : {"raise", "warn", "ignore"}, default: "raise"
@@ -2385,7 +2385,7 @@ def T(self) -> DataArray:
return self.transpose()

def drop_vars(
self, names: Hashable | Iterable[Hashable], *, errors: str = "raise"
self, names: Hashable | Iterable[Hashable], *, errors: ErrorChoice = "raise"
) -> DataArray:
"""Returns an array with dropped variables.

@@ -4586,7 +4586,7 @@ def query(
queries: Mapping[Any, Any] = None,
parser: str = "pandas",
engine: str = None,
missing_dims: str = "raise",
missing_dims: ErrorChoiceWithWarn = "raise",
**queries_kwargs: Any,
) -> DataArray:
"""Return a new data array indexed along the specified
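The missing_dims values that the new ErrorChoiceWithWarn alias enumerates behave as described in the docstring above; a small usage sketch (illustrative, not part of the PR):

import xarray as xr

da = xr.DataArray([1, 2, 3], dims="x")

# "ignore" silently skips indexers for dimensions the array does not have,
# so the array comes back unchanged here.
unchanged = da.isel(y=0, missing_dims="ignore")
assert unchanged.identical(da)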
16 changes: 9 additions & 7 deletions xarray/core/dataset.py
@@ -105,7 +105,7 @@
from ..backends import AbstractDataStore, ZarrStore
from .dataarray import DataArray
from .merge import CoercibleMapping
from .types import T_Xarray
from .types import ErrorChoiceWithWarn, T_Xarray

try:
from dask.delayed import Delayed
@@ -2163,7 +2163,7 @@ def isel(
self,
indexers: Mapping[Any, Any] = None,
drop: bool = False,
missing_dims: str = "raise",
missing_dims: ErrorChoiceWithWarn = "raise",
**indexers_kwargs: Any,
) -> Dataset:
"""Returns a new dataset with each array indexed along the specified
@@ -2182,14 +2182,14 @@
If DataArrays are passed as indexers, xarray-style indexing will be
carried out. See :ref:`indexing` for the details.
One of indexers or indexers_kwargs must be provided.
drop : bool, optional
drop : bool, default: False
If ``drop=True``, drop coordinates variables indexed by integers
instead of making them scalar.
missing_dims : {"raise", "warn", "ignore"}, default: "raise"
What to do if dimensions that should be selected from are not present in the
Dataset:
- "raise": raise an exception
- "warning": raise a warning, and ignore the missing dimensions
- "warn": raise a warning, and ignore the missing dimensions
- "ignore": ignore the missing dimensions
**indexers_kwargs : {dim: indexer, ...}, optional
The keyword arguments form of ``indexers``.
@@ -2254,7 +2254,7 @@ def _isel_fancy(
indexers: Mapping[Any, Any],
*,
drop: bool,
missing_dims: str = "raise",
missing_dims: ErrorChoiceWithWarn = "raise",
) -> Dataset:
valid_indexers = dict(self._validate_indexers(indexers, missing_dims))

@@ -2270,6 +2270,8 @@
}
if var_indexers:
new_var = var.isel(indexers=var_indexers)
if drop and new_var.ndim == 0:
continue
else:
new_var = var.copy(deep=False)
if name not in indexes:
@@ -7713,7 +7715,7 @@ def query(
queries: Mapping[Any, Any] = None,
parser: str = "pandas",
engine: str = None,
missing_dims: str = "raise",
missing_dims: ErrorChoiceWithWarn = "raise",
**queries_kwargs: Any,
) -> Dataset:
"""Return a new dataset with each array indexed along the specified
Expand Down Expand Up @@ -7746,7 +7748,7 @@ def query(
Dataset:

- "raise": raise an exception
- "warning": raise a warning, and ignore the missing dimensions
- "warn": raise a warning, and ignore the missing dimensions
- "ignore": ignore the missing dimensions

**queries_kwargs : {dim: query, ...}, optional
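The corrected docstrings spell the option "warn" rather than "warning"; a short sketch of that option (illustrative, not part of the PR):

import warnings
import xarray as xr

ds = xr.Dataset({"a": ("x", [1, 2, 3])})

# "warn" emits a warning about the missing dimension "y" and then ignores it,
# so the result is the same as with missing_dims="ignore".
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    result = ds.isel(y=0, missing_dims="warn")
assert caught
assert result.identical(ds)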
5 changes: 4 additions & 1 deletion xarray/core/types.py
@@ -1,6 +1,6 @@
from __future__ import annotations

from typing import TYPE_CHECKING, TypeVar, Union
from typing import TYPE_CHECKING, Literal, TypeVar, Union

import numpy as np

@@ -33,3 +33,6 @@
DaCompatible = Union["DataArray", "Variable", "DataArrayGroupBy", "ScalarOrArray"]
VarCompatible = Union["Variable", "ScalarOrArray"]
GroupByIncompatible = Union["Variable", "GroupBy"]

ErrorChoice = Literal["raise", "ignore"]
Collaborator:

Great! We could find / replace for these at some point.

Collaborator (Author):

Even though it is scope creep, I have typed any occurrences that I have found in this PR.
Mypy is not complaining locally about these changes.

ErrorChoiceWithWarn = Literal["raise", "warn", "ignore"]
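A hedged sketch of how these Literal aliases tighten signatures (the helper function below is hypothetical, not part of the PR):

from typing import Literal

ErrorChoice = Literal["raise", "ignore"]
ErrorChoiceWithWarn = Literal["raise", "warn", "ignore"]

def check_missing_dims(missing_dims: ErrorChoiceWithWarn = "raise") -> None:
    # Hypothetical helper: with the Literal alias, mypy can flag typos such
    # as missing_dims="warning" at type-check time instead of at runtime.
    if missing_dims not in ("raise", "warn", "ignore"):
        raise ValueError(f"invalid missing_dims: {missing_dims!r}")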
11 changes: 11 additions & 0 deletions xarray/tests/test_dataset.py
@@ -1175,6 +1175,17 @@ def test_isel_fancy(self):
assert_array_equal(actual["var2"], expected_var2)
assert_array_equal(actual["var3"], expected_var3)

# test that drop works
ds = xr.Dataset({"a": (("x",), [1, 2, 3])}, coords={"b": (("x",), [5, 6, 7])})

actual = ds.isel({"x": 1}, drop=False)
expected = xr.Dataset({"a": 2}, coords={"b": 6})
assert_identical(actual, expected)

actual = ds.isel({"x": 1}, drop=True)
expected = xr.Dataset({"a": 2})
assert_identical(actual, expected)
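# A complementary sketch (hypothetical, not part of the shown diff): the
# plain-integer indexer above goes through the non-fancy path, while a 0-d
# DataArray indexer exercises the _isel_fancy path this PR fixes.
actual = ds.isel(x=xr.DataArray(1), drop=True)
assert "b" not in actual.coords

actual = ds.isel(x=xr.DataArray(1), drop=False)
assert "b" in actual.coords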

def test_isel_dataarray(self):
"""Test for indexing by DataArray"""
data = create_test_data()