diff --git a/.github/ISSUE_TEMPLATE/bugreport.yml b/.github/ISSUE_TEMPLATE/bugreport.yml
index ba5bc8abaea..59e5889f5ec 100644
--- a/.github/ISSUE_TEMPLATE/bugreport.yml
+++ b/.github/ISSUE_TEMPLATE/bugreport.yml
@@ -1,4 +1,4 @@
-name: Bug Report
+name: 🐛 Bug Report
description: File a bug report to help us improve
labels: [bug, "needs triage"]
body:
@@ -26,14 +26,24 @@ body:
attributes:
label: Minimal Complete Verifiable Example
description: |
- Minimal, self-contained copy-pastable example that generates the issue if possible. Please be concise with code posted. See guidelines below on how to provide a good bug report:
+ Minimal, self-contained copy-pastable example that demonstrates the issue. This will be automatically formatted into code, so no need for markdown backticks.
+ render: Python
+
+ - type: checkboxes
+ id: mvce-checkboxes
+ attributes:
+ label: MVCE confirmation
+ description: |
+ Please confirm that the bug report is in an excellent state, so we can understand & fix it quickly & efficiently. For more details, check out:
- [Minimal Complete Verifiable Examples](https://stackoverflow.com/help/mcve)
- [Craft Minimal Bug Reports](http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports)
- Bug reports that follow these guidelines are easier to diagnose, and so are often handled much more quickly.
- This will be automatically formatted into code, so no need for markdown backticks.
- render: Python
+ options:
+ - label: Minimal example — the example is as focused as reasonably possible to demonstrate the underlying issue in xarray.
+ - label: Complete example — the example is self-contained, including all data and the text of any traceback.
+ - label: Verifiable example — the example copy & pastes into an IPython prompt or [Binder notebook](https://mybinder.org/v2/gh/pydata/xarray/main?urlpath=lab/tree/doc/examples/blank_template.ipynb), returning the result.
+ - label: New issue — a search of GitHub Issues suggests this is not a duplicate.
- type: textarea
id: log-output
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 994c594685d..83e5f3b97fa 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,12 +1,12 @@
blank_issues_enabled: false
contact_links:
- - name: Usage question
+ - name: ❓ Usage question
url: https://github.com/pydata/xarray/discussions
about: |
Ask questions and discuss with other community members here.
If you have a question like "How do I concatenate a list of datasets?" then
please include a self-contained reproducible example if possible.
- - name: Raster analysis usage question
+ - name: 🗺️ Raster analysis usage question
url: https://github.com/corteva/rioxarray/discussions
about: |
If you are using the rioxarray extension (engine='rasterio'), or have questions about
diff --git a/.github/ISSUE_TEMPLATE/misc.yml b/.github/ISSUE_TEMPLATE/misc.yml
index 94dd2d86567..a98f6d90c45 100644
--- a/.github/ISSUE_TEMPLATE/misc.yml
+++ b/.github/ISSUE_TEMPLATE/misc.yml
@@ -1,5 +1,5 @@
-name: Issue
-description: General Issue or discussion topic. For usage questions, please follow the "Usage question" link
+name: 📝 Issue
+description: General issue, that's not a bug report.
labels: ["needs triage"]
body:
- type: markdown
diff --git a/.github/ISSUE_TEMPLATE/newfeature.yml b/.github/ISSUE_TEMPLATE/newfeature.yml
index 77cb15b7d37..04adf4bb867 100644
--- a/.github/ISSUE_TEMPLATE/newfeature.yml
+++ b/.github/ISSUE_TEMPLATE/newfeature.yml
@@ -1,4 +1,4 @@
-name: Feature Request
+name: 💡 Feature Request
description: Suggest an idea for xarray
labels: [enhancement]
body:
diff --git a/.github/workflows/ci-additional.yaml b/.github/workflows/ci-additional.yaml
index f2542ab52d5..9a10403d44b 100644
--- a/.github/workflows/ci-additional.yaml
+++ b/.github/workflows/ci-additional.yaml
@@ -30,51 +30,19 @@ jobs:
with:
keyword: "[skip-ci]"
- test:
- name: ${{ matrix.os }} ${{ matrix.env }}
- runs-on: ${{ matrix.os }}
+ doctest:
+ name: Doctests
+ runs-on: "ubuntu-latest"
needs: detect-ci-trigger
if: needs.detect-ci-trigger.outputs.triggered == 'false'
defaults:
run:
shell: bash -l {0}
- strategy:
- fail-fast: false
- matrix:
- os: ["ubuntu-latest"]
- env:
- [
- # Minimum python version:
- "py38-bare-minimum",
- "py38-min-all-deps",
- # Latest python version:
- "py39-all-but-dask",
- "py39-flaky",
- ]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
-
- - name: Set environment variables
- run: |
- if [[ ${{ matrix.env }} == "py39-flaky" ]] ;
- then
- echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV
- echo "PYTEST_EXTRA_FLAGS=--run-flaky --run-network-tests" >> $GITHUB_ENV
-
- else
- echo "CONDA_ENV_FILE=ci/requirements/${{ matrix.env }}.yml" >> $GITHUB_ENV
- fi
- - name: Cache conda
- uses: actions/cache@v3
- with:
- path: ~/conda_pkgs_dir
- key:
- ${{ runner.os }}-conda-${{ matrix.env }}-${{
- hashFiles('ci/requirements/**.yml') }}
-
- uses: conda-incubator/setup-miniconda@v2
with:
channels: conda-forge
@@ -82,44 +50,30 @@ jobs:
mamba-version: "*"
activate-environment: xarray-tests
auto-update-conda: false
- python-version: 3.9
- use-only-tar-bz2: true
+ python-version: "3.9"
- name: Install conda dependencies
run: |
- mamba env update -f $CONDA_ENV_FILE
-
+ mamba env update -f ci/requirements/environment.yml
- name: Install xarray
run: |
python -m pip install --no-deps -e .
-
- name: Version info
run: |
conda info -a
conda list
python xarray/util/print_versions.py
- - name: Import xarray
- run: |
- python -c "import xarray"
- - name: Run tests
+ - name: Run doctests
run: |
- python -m pytest -n 4 \
- --cov=xarray \
- --cov-report=xml \
- $PYTEST_EXTRA_FLAGS
+ python -m pytest --doctest-modules xarray --ignore xarray/tests
- - name: Upload code coverage to Codecov
- uses: codecov/codecov-action@v3.0.0
- with:
- file: ./coverage.xml
- flags: unittests,${{ matrix.env }}
- env_vars: RUNNER_OS
- name: codecov-umbrella
- fail_ci_if_error: false
- doctest:
- name: Doctests
+ mypy:
+ name: Mypy
runs-on: "ubuntu-latest"
- if: needs.detect-ci-trigger.outputs.triggered == 'false'
+ needs: detect-ci-trigger
+ # temporarily skipping due to https://github.com/pydata/xarray/issues/6551
+ # if: needs.detect-ci-trigger.outputs.triggered == 'false'
+ if: false
defaults:
run:
shell: bash -l {0}
@@ -148,9 +102,15 @@ jobs:
conda info -a
conda list
python xarray/util/print_versions.py
- - name: Run doctests
+ - name: Install mypy
run: |
- python -m pytest --doctest-modules xarray --ignore xarray/tests
+ python -m pip install mypy
+
+ # Temporarily overriding to be true due to https://github.com/pydata/xarray/issues/6551
+ # python -m mypy --install-types --non-interactive
+ - name: Run mypy
+ run: |
+ python -m mypy --install-types --non-interactive || true
min-version-policy:
name: Minimum Version Policy
@@ -176,5 +136,5 @@ jobs:
- name: minimum versions policy
run: |
mamba install -y pyyaml conda python-dateutil
- python ci/min_deps_check.py ci/requirements/py38-bare-minimum.yml
- python ci/min_deps_check.py ci/requirements/py38-min-all-deps.yml
+ python ci/min_deps_check.py ci/requirements/bare-minimum.yml
+ python ci/min_deps_check.py ci/requirements/min-all-deps.yml
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index a5c1a2de5ad..20f876b52fc 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -30,7 +30,7 @@ jobs:
with:
keyword: "[skip-ci]"
test:
- name: ${{ matrix.os }} py${{ matrix.python-version }}
+ name: ${{ matrix.os }} py${{ matrix.python-version }} ${{ matrix.env }}
runs-on: ${{ matrix.os }}
needs: detect-ci-trigger
if: needs.detect-ci-trigger.outputs.triggered == 'false'
@@ -43,6 +43,22 @@ jobs:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
# Bookend python versions
python-version: ["3.8", "3.10"]
+ env: [""]
+ include:
+ # Minimum python version:
+ - env: "bare-minimum"
+ python-version: "3.8"
+ os: ubuntu-latest
+ - env: "min-all-deps"
+ python-version: "3.8"
+ os: ubuntu-latest
+ # Latest python version:
+ - env: "all-but-dask"
+ python-version: "3.10"
+ os: ubuntu-latest
+ - env: "flaky"
+ python-version: "3.10"
+ os: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
@@ -52,18 +68,27 @@ jobs:
if [[ ${{ matrix.os }} == windows* ]] ;
then
echo "CONDA_ENV_FILE=ci/requirements/environment-windows.yml" >> $GITHUB_ENV
+ elif [[ "${{ matrix.env }}" != "" ]] ;
+ then
+ if [[ "${{ matrix.env }}" == "flaky" ]] ;
+ then
+ echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV
+ echo "PYTEST_EXTRA_FLAGS=--run-flaky --run-network-tests" >> $GITHUB_ENV
+ else
+ echo "CONDA_ENV_FILE=ci/requirements/${{ matrix.env }}.yml" >> $GITHUB_ENV
+ fi
else
echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV
-
fi
+
echo "PYTHON_VERSION=${{ matrix.python-version }}" >> $GITHUB_ENV
- name: Cache conda
uses: actions/cache@v3
with:
path: ~/conda_pkgs_dir
- key: ${{ runner.os }}-conda-py${{ matrix.python-version }}-${{
- hashFiles('ci/requirements/**.yml') }}
+ key: ${{ runner.os }}-conda-py${{ matrix.python-version }}-${{ hashFiles('ci/requirements/**.yml') }}-${{ matrix.env }}
+
- uses: conda-incubator/setup-miniconda@v2
with:
channels: conda-forge
@@ -78,6 +103,13 @@ jobs:
run: |
mamba env update -f $CONDA_ENV_FILE
+ # We only want to install this on one run, because otherwise we'll have
+ # duplicate annotations.
+ - name: Install error reporter
+ if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.10'
+ run: |
+ python -m pip install pytest-github-actions-annotate-failures
+
- name: Install xarray
run: |
python -m pip install --no-deps -e .
@@ -87,14 +119,17 @@ jobs:
conda info -a
conda list
python xarray/util/print_versions.py
+
- name: Import xarray
run: |
python -c "import xarray"
+
- name: Run tests
run: python -m pytest -n 4
--cov=xarray
--cov-report=xml
--junitxml=pytest.xml
+ $PYTEST_EXTRA_FLAGS
- name: Upload test results
if: always()
@@ -104,7 +139,7 @@ jobs:
path: pytest.xml
- name: Upload code coverage to Codecov
- uses: codecov/codecov-action@v3.0.0
+ uses: codecov/codecov-action@v3.1.0
with:
file: ./coverage.xml
flags: unittests
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index be87d823c98..1b11e285f1e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -35,6 +35,7 @@ repos:
hooks:
- id: blackdoc
exclude: "generate_reductions.py"
+ additional_dependencies: ["black==22.3.0"]
- repo: https://github.com/PyCQA/flake8
rev: 4.0.1
hooks:
@@ -45,7 +46,7 @@ repos:
# - id: velin
# args: ["--write", "--compact"]
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v0.942
+ rev: v0.950
hooks:
- id: mypy
# Copied from setup.cfg
diff --git a/README.md b/README.md
index 57a68d42192..ff8fceefa99 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,7 @@
[](https://pypi.python.org/pypi/xarray/)
[](https://github.com/python/black)
[](https://doi.org/10.5281/zenodo.598201)
+[](https://mybinder.org/v2/gh/pydata/xarray/main?urlpath=lab/tree/doc/examples/weather-data.ipynb)
[](https://twitter.com/xarray_dev)
**xarray** (formerly **xray**) is an open source project and Python
@@ -71,7 +72,7 @@ page](https://docs.xarray.dev/en/latest/contributing.html#).
## Get in touch
- Ask usage questions ("How do I?") on
- [StackOverflow](https://stackoverflow.com/questions/tagged/python-xarray).
+ [GitHub Discussions](https://github.com/pydata/xarray/discussions).
- Report bugs, suggest features or view the source code [on
GitHub](https://github.com/pydata/xarray).
- For less well defined questions or ideas, or to announce other
@@ -80,7 +81,7 @@ page](https://docs.xarray.dev/en/latest/contributing.html#).
## NumFOCUS
-[](https://numfocus.org/)
+
Xarray is a fiscally sponsored project of
[NumFOCUS](https://numfocus.org), a nonprofit dedicated to supporting
@@ -98,6 +99,12 @@ released as open source in May 2014. The project was renamed from
"xray" in January 2016. Xarray became a fiscally sponsored project of
[NumFOCUS](https://numfocus.org) in August 2018.
+## Contributors
+
+Thanks to our many contributors!
+
+[](https://github.com/pydata/xarray/graphs/contributors)
+
## License
Copyright 2014-2019, xarray Developers
diff --git a/ci/requirements/py39-all-but-dask.yml b/ci/requirements/all-but-dask.yml
similarity index 90%
rename from ci/requirements/py39-all-but-dask.yml
rename to ci/requirements/all-but-dask.yml
index e2488459e6b..e20ec2016ed 100644
--- a/ci/requirements/py39-all-but-dask.yml
+++ b/ci/requirements/all-but-dask.yml
@@ -3,7 +3,7 @@ channels:
- conda-forge
- nodefaults
dependencies:
- - python=3.9
+ - python=3.10
- black
- aiobotocore
- boto3
@@ -18,7 +18,7 @@ dependencies:
- h5py
- hdf5
- hypothesis
- - lxml # Optional dep of pydap
+ - lxml # Optional dep of pydap
- matplotlib-base
- nc-time-axis
- netcdf4
@@ -43,4 +43,4 @@ dependencies:
- typing_extensions
- zarr
- pip:
- - numbagg
+ - numbagg
diff --git a/ci/requirements/py38-bare-minimum.yml b/ci/requirements/bare-minimum.yml
similarity index 100%
rename from ci/requirements/py38-bare-minimum.yml
rename to ci/requirements/bare-minimum.yml
diff --git a/ci/requirements/environment.yml b/ci/requirements/environment.yml
index 7b198c9f0ca..d37bb7dc44a 100644
--- a/ci/requirements/environment.yml
+++ b/ci/requirements/environment.yml
@@ -38,7 +38,6 @@ dependencies:
- pytest
- pytest-cov
- pytest-env
- - pytest-github-actions-annotate-failures
- pytest-xdist
- rasterio
- scipy
diff --git a/ci/requirements/py38-min-all-deps.yml b/ci/requirements/min-all-deps.yml
similarity index 100%
rename from ci/requirements/py38-min-all-deps.yml
rename to ci/requirements/min-all-deps.yml
diff --git a/doc/examples/blank_template.ipynb b/doc/examples/blank_template.ipynb
new file mode 100644
index 00000000000..bcb15c1158d
--- /dev/null
+++ b/doc/examples/blank_template.ipynb
@@ -0,0 +1,58 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "d8f54f6a",
+ "metadata": {},
+ "source": [
+ "# Blank template\n",
+ "\n",
+ "Use this notebook from Binder to test an issue or reproduce a bug report"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "41b90ede",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import xarray as xr\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "\n",
+ "ds = xr.tutorial.load_dataset(\"air_temperature\")\n",
+ "da = ds[\"air\"]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "effd9aeb",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.10"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/doc/gallery.rst b/doc/gallery.rst
index 36eb39d1a53..22cf0c1c379 100644
--- a/doc/gallery.rst
+++ b/doc/gallery.rst
@@ -93,6 +93,7 @@ Notebook Examples
examples/ROMS_ocean_model
examples/ERA5-GRIB-example
examples/apply_ufunc_vectorize_1d
+ examples/blank_template
External Examples
diff --git a/doc/getting-started-guide/installing.rst b/doc/getting-started-guide/installing.rst
index 6177ba0aaac..0668853946f 100644
--- a/doc/getting-started-guide/installing.rst
+++ b/doc/getting-started-guide/installing.rst
@@ -102,7 +102,7 @@ release is guaranteed to work.
You can see the actual minimum tested versions:
-`<https://github.com/pydata/xarray/blob/main/ci/requirements/py38-min-all-deps.yml>`_
+`<https://github.com/pydata/xarray/blob/main/ci/requirements/min-all-deps.yml>`_
.. _installation-instructions:
diff --git a/xarray/backends/plugins.py b/xarray/backends/plugins.py
index 7444fbf11eb..44953b875d9 100644
--- a/xarray/backends/plugins.py
+++ b/xarray/backends/plugins.py
@@ -1,6 +1,7 @@
import functools
import inspect
import itertools
+import sys
import warnings
from importlib.metadata import entry_points
@@ -95,7 +96,11 @@ def build_engines(entrypoints):
@functools.lru_cache(maxsize=1)
def list_engines():
- entrypoints = entry_points().get("xarray.backends", ())
+ # New selection mechanism introduced with Python 3.10. See GH6514.
+ if sys.version_info >= (3, 10):
+ entrypoints = entry_points(group="xarray.backends")
+ else:
+ entrypoints = entry_points().get("xarray.backends", ())
return build_engines(entrypoints)
diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py
index 2cf78fa7c61..d15cbd00c0d 100644
--- a/xarray/core/dataarray.py
+++ b/xarray/core/dataarray.py
@@ -1154,7 +1154,8 @@ def chunk(
chunks = {}
if isinstance(chunks, (float, str, int)):
- chunks = dict.fromkeys(self.dims, chunks)
+ # ignoring type; unclear why it won't accept a Literal into the value.
+ chunks = dict.fromkeys(self.dims, chunks) # type: ignore
elif isinstance(chunks, (tuple, list)):
chunks = dict(zip(self.dims, chunks))
else:
@@ -4735,7 +4736,7 @@ def curvefit(
def drop_duplicates(
self,
- dim: Hashable | Iterable[Hashable] | ...,
+ dim: Hashable | Iterable[Hashable],
keep: Literal["first", "last"] | Literal[False] = "first",
):
"""Returns a new DataArray with duplicate dimension values removed.
diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py
index 2c67cd665ca..76776b4bc44 100644
--- a/xarray/core/dataset.py
+++ b/xarray/core/dataset.py
@@ -7981,7 +7981,7 @@ def _wrapper(Y, *coords_, **kwargs):
def drop_duplicates(
self,
- dim: Hashable | Iterable[Hashable] | ...,
+ dim: Hashable | Iterable[Hashable],
keep: Literal["first", "last"] | Literal[False] = "first",
):
"""Returns a new Dataset with duplicate dimension values removed.
@@ -8005,9 +8005,11 @@ def drop_duplicates(
DataArray.drop_duplicates
"""
if isinstance(dim, str):
- dims = (dim,)
+ dims: Iterable = (dim,)
elif dim is ...:
dims = self.dims
+ elif not isinstance(dim, Iterable):
+ dims = [dim]
else:
dims = dim
diff --git a/xarray/core/indexing.py b/xarray/core/indexing.py
index 27bd4954bc4..cbbd507eeff 100644
--- a/xarray/core/indexing.py
+++ b/xarray/core/indexing.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import enum
import functools
import operator
@@ -6,19 +8,7 @@
from dataclasses import dataclass, field
from datetime import timedelta
from html import escape
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Hashable,
- Iterable,
- List,
- Mapping,
- Optional,
- Tuple,
- Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, Hashable, Iterable, Mapping
import numpy as np
import pandas as pd
@@ -59,12 +49,12 @@ class IndexSelResult:
"""
- dim_indexers: Dict[Any, Any]
- indexes: Dict[Any, "Index"] = field(default_factory=dict)
- variables: Dict[Any, "Variable"] = field(default_factory=dict)
- drop_coords: List[Hashable] = field(default_factory=list)
- drop_indexes: List[Hashable] = field(default_factory=list)
- rename_dims: Dict[Any, Hashable] = field(default_factory=dict)
+ dim_indexers: dict[Any, Any]
+ indexes: dict[Any, Index] = field(default_factory=dict)
+ variables: dict[Any, Variable] = field(default_factory=dict)
+ drop_coords: list[Hashable] = field(default_factory=list)
+ drop_indexes: list[Hashable] = field(default_factory=list)
+ rename_dims: dict[Any, Hashable] = field(default_factory=dict)
def as_tuple(self):
"""Unlike ``dataclasses.astuple``, return a shallow copy.
@@ -82,7 +72,7 @@ def as_tuple(self):
)
-def merge_sel_results(results: List[IndexSelResult]) -> IndexSelResult:
+def merge_sel_results(results: list[IndexSelResult]) -> IndexSelResult:
all_dims_count = Counter([dim for res in results for dim in res.dim_indexers])
duplicate_dims = {k: v for k, v in all_dims_count.items() if v > 1}
@@ -124,13 +114,13 @@ def group_indexers_by_index(
obj: T_Xarray,
indexers: Mapping[Any, Any],
options: Mapping[str, Any],
-) -> List[Tuple["Index", Dict[Any, Any]]]:
+) -> list[tuple[Index, dict[Any, Any]]]:
"""Returns a list of unique indexes and their corresponding indexers."""
unique_indexes = {}
- grouped_indexers: Mapping[Union[int, None], Dict] = defaultdict(dict)
+ grouped_indexers: Mapping[int | None, dict] = defaultdict(dict)
for key, label in indexers.items():
- index: "Index" = obj.xindexes.get(key, None)
+ index: Index = obj.xindexes.get(key, None)
if index is not None:
index_id = id(index)
@@ -787,7 +777,7 @@ class IndexingSupport(enum.Enum):
def explicit_indexing_adapter(
key: ExplicitIndexer,
- shape: Tuple[int, ...],
+ shape: tuple[int, ...],
indexing_support: IndexingSupport,
raw_indexing_method: Callable,
) -> Any:
@@ -821,8 +811,8 @@ def explicit_indexing_adapter(
def decompose_indexer(
- indexer: ExplicitIndexer, shape: Tuple[int, ...], indexing_support: IndexingSupport
-) -> Tuple[ExplicitIndexer, ExplicitIndexer]:
+ indexer: ExplicitIndexer, shape: tuple[int, ...], indexing_support: IndexingSupport
+) -> tuple[ExplicitIndexer, ExplicitIndexer]:
if isinstance(indexer, VectorizedIndexer):
return _decompose_vectorized_indexer(indexer, shape, indexing_support)
if isinstance(indexer, (BasicIndexer, OuterIndexer)):
@@ -848,9 +838,9 @@ def _decompose_slice(key, size):
def _decompose_vectorized_indexer(
indexer: VectorizedIndexer,
- shape: Tuple[int, ...],
+ shape: tuple[int, ...],
indexing_support: IndexingSupport,
-) -> Tuple[ExplicitIndexer, ExplicitIndexer]:
+) -> tuple[ExplicitIndexer, ExplicitIndexer]:
"""
Decompose vectorized indexer to the successive two indexers, where the
first indexer will be used to index backend arrays, while the second one
@@ -929,10 +919,10 @@ def _decompose_vectorized_indexer(
def _decompose_outer_indexer(
- indexer: Union[BasicIndexer, OuterIndexer],
- shape: Tuple[int, ...],
+ indexer: BasicIndexer | OuterIndexer,
+ shape: tuple[int, ...],
indexing_support: IndexingSupport,
-) -> Tuple[ExplicitIndexer, ExplicitIndexer]:
+) -> tuple[ExplicitIndexer, ExplicitIndexer]:
"""
Decompose outer indexer to the successive two indexers, where the
first indexer will be used to index backend arrays, while the second one
@@ -973,7 +963,7 @@ def _decompose_outer_indexer(
return indexer, BasicIndexer(())
assert isinstance(indexer, (OuterIndexer, BasicIndexer))
- backend_indexer: List[Any] = []
+ backend_indexer: list[Any] = []
np_indexer = []
# make indexer positive
pos_indexer: list[np.ndarray | int | np.number] = []
@@ -1395,7 +1385,7 @@ def __array__(self, dtype: DTypeLike = None) -> np.ndarray:
return np.asarray(array.values, dtype=dtype)
@property
- def shape(self) -> Tuple[int]:
+ def shape(self) -> tuple[int]:
return (len(self.array),)
def _convert_scalar(self, item):
@@ -1420,13 +1410,13 @@ def _convert_scalar(self, item):
def __getitem__(
self, indexer
- ) -> Union[
- "PandasIndexingAdapter",
- NumpyIndexingAdapter,
- np.ndarray,
- np.datetime64,
- np.timedelta64,
- ]:
+ ) -> (
+ PandasIndexingAdapter
+ | NumpyIndexingAdapter
+ | np.ndarray
+ | np.datetime64
+ | np.timedelta64
+ ):
key = indexer.tuple
if isinstance(key, tuple) and len(key) == 1:
# unpack key so it can index a pandas.Index object (pandas.Index
@@ -1449,7 +1439,7 @@ def transpose(self, order) -> pd.Index:
def __repr__(self) -> str:
return f"{type(self).__name__}(array={self.array!r}, dtype={self.dtype!r})"
- def copy(self, deep: bool = True) -> "PandasIndexingAdapter":
+ def copy(self, deep: bool = True) -> PandasIndexingAdapter:
# Not the same as just writing `self.array.copy(deep=deep)`, as
# shallow copies of the underlying numpy.ndarrays become deep ones
# upon pickling
@@ -1476,7 +1466,7 @@ def __init__(
self,
array: pd.MultiIndex,
dtype: DTypeLike = None,
- level: Optional[str] = None,
+ level: str | None = None,
):
super().__init__(array, dtype)
self.level = level
@@ -1535,7 +1525,7 @@ def _repr_html_(self) -> str:
array_repr = short_numpy_repr(self._get_array_subset())
return f"<pre>{escape(array_repr)}</pre>"
- def copy(self, deep: bool = True) -> "PandasMultiIndexingAdapter":
+ def copy(self, deep: bool = True) -> PandasMultiIndexingAdapter:
# see PandasIndexingAdapter.copy
array = self.array.copy(deep=True) if deep else self.array
return type(self)(array, self._dtype, self.level)
diff --git a/xarray/core/utils.py b/xarray/core/utils.py
index ba44f6d8466..1f309d210ff 100644
--- a/xarray/core/utils.py
+++ b/xarray/core/utils.py
@@ -237,7 +237,8 @@ def remove_incompatible_items(
del first_dict[k]
-def is_dict_like(value: Any) -> bool:
+# It's probably OK to give this as a TypeGuard; though it's not perfectly robust.
+def is_dict_like(value: Any) -> TypeGuard[dict]:
return hasattr(value, "keys") and hasattr(value, "__getitem__")
diff --git a/xarray/tests/test_coding_times.py b/xarray/tests/test_coding_times.py
index 92d27f22eb8..f10523aecbb 100644
--- a/xarray/tests/test_coding_times.py
+++ b/xarray/tests/test_coding_times.py
@@ -1007,12 +1007,9 @@ def test_decode_ambiguous_time_warns(calendar) -> None:
units = "days since 1-1-1"
expected = num2date(dates, units, calendar=calendar, only_use_cftime_datetimes=True)
- exp_warn_type = SerializationWarning if is_standard_calendar else None
-
- with pytest.warns(exp_warn_type) as record:
- result = decode_cf_datetime(dates, units, calendar=calendar)
-
if is_standard_calendar:
+ with pytest.warns(SerializationWarning) as record:
+ result = decode_cf_datetime(dates, units, calendar=calendar)
relevant_warnings = [
r
for r in record.list
@@ -1020,7 +1017,8 @@ def test_decode_ambiguous_time_warns(calendar) -> None:
]
assert len(relevant_warnings) == 1
else:
- assert not record
+ with assert_no_warnings():
+ result = decode_cf_datetime(dates, units, calendar=calendar)
np.testing.assert_array_equal(result, expected)
diff --git a/xarray/tests/test_formatting.py b/xarray/tests/test_formatting.py
index efdb8a57288..4bbf41c7b38 100644
--- a/xarray/tests/test_formatting.py
+++ b/xarray/tests/test_formatting.py
@@ -480,10 +480,10 @@ def test_short_numpy_repr() -> None:
assert num_lines < 30
# threshold option (default: 200)
- array = np.arange(100)
- assert "..." not in formatting.short_numpy_repr(array)
+ array2 = np.arange(100)
+ assert "..." not in formatting.short_numpy_repr(array2)
with xr.set_options(display_values_threshold=10):
- assert "..." in formatting.short_numpy_repr(array)
+ assert "..." in formatting.short_numpy_repr(array2)
def test_large_array_repr_length() -> None: