diff --git a/.cirrus.yml b/.cirrus.yml
index b3992de64a4..c9c1d718596 100644
--- a/.cirrus.yml
+++ b/.cirrus.yml
@@ -38,7 +38,7 @@ env:
   # Conda packages to be installed.
   CONDA_CACHE_PACKAGES: "nox pip"
   # Git commit hash for iris test data.
-  IRIS_TEST_DATA_VERSION: "2.5"
+  IRIS_TEST_DATA_VERSION: "2.7"
   # Base directory for the iris-test-data.
   IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data

@@ -60,7 +60,6 @@ linux_task_template: &LINUX_TASK_TEMPLATE
     - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CONDA_CACHE_BUILD}"
     - uname -r
   populate_script:
-    - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)"
    - bash miniconda.sh -b -p ${HOME}/miniconda
    - conda config --set always_yes yes --set changeps1 no
    - conda config --set show_channel_urls True
@@ -141,8 +140,6 @@ task:
   only_if: ${SKIP_TEST_TASK} == ""
   << : *CREDITS_TEMPLATE
   matrix:
-    env:
-      PY_VER: 3.7
     env:
       PY_VER: 3.8
   name: "${CIRRUS_OS}: py${PY_VER} tests"
@@ -153,7 +150,6 @@ task:
   << : *IRIS_TEST_DATA_TEMPLATE
   << : *LINUX_TASK_TEMPLATE
   tests_script:
-    - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)"
    - echo "[Resources]" > ${SITE_CFG}
    - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG}
    - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG}
@@ -174,7 +170,6 @@ task:
   << : *IRIS_TEST_DATA_TEMPLATE
   << : *LINUX_TASK_TEMPLATE
   tests_script:
-    - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)"
    - echo "[Resources]" > ${SITE_CFG}
    - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG}
    - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG}
@@ -197,7 +192,6 @@ task:
   name: "${CIRRUS_OS}: py${PY_VER} link check"
   << : *LINUX_TASK_TEMPLATE
   tests_script:
-    - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)"
    - mkdir -p ${MPL_RC_DIR}
    - echo "backend : agg" > ${MPL_RC_FILE}
    - echo "image.cmap : viridis" >> ${MPL_RC_FILE}
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000000..e9b45d116af
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,15 @@
+# Reference:
+# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot
+# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      # Check for updates to GitHub Actions every weekday
+      interval: "daily"
+    labels:
+      - "New: Pull Request"
+      - "Bot"
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index b489eba0360..d4c01af48a0 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -1,10 +1,11 @@
-# This is a basic workflow to help you get started with Actions
+# Use ASV to check for performance regressions in the last 24 hours' commits.

 name: benchmark-check

 on:
-  # Triggers the workflow on push or pull request events but only for the master branch
-  pull_request:
+  schedule:
+    # Runs every day at 23:00.
+    - cron: "0 23 * * *"

 jobs:
   benchmark:
     runs-on: ubuntu-latest

     env:
       IRIS_TEST_DATA_PATH: benchmarks/iris-test-data
       IRIS_TEST_DATA_VERSION: "2.5"
       # Lets us manually bump the cache to rebuild
+      ENV_CACHE_BUILD: "0"
       TEST_DATA_CACHE_BUILD: "2"
+      PY_VER: 3.8

     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
       - uses: actions/checkout@v2
-
-      - name: Fetch the PR base branch too
-        run: |
-          git fetch --depth=1 origin ${{ github.event.pull_request.base.ref }}
-          git branch _base FETCH_HEAD
-          echo PR_BASE_SHA=$(git rev-parse _base) >> $GITHUB_ENV
+        with:
+          fetch-depth: 0

       - name: Install Nox
         run: |
           pip install nox

-      - name: Cache .nox and .asv/env directories
+      - name: Cache environment directories
         id: cache-env-dir
         uses: actions/cache@v2
         with:
           path: |
             .nox
             benchmarks/.asv/env
-          # Make sure GHA never gets an exact cache match by using the unique
-          # github.sha. This means it will always store this run as a new
-          # cache (Nox may have made relevant changes during run). Cache
-          # restoration still succeeds via the partial restore-key match.
-          key: ${{ runner.os }}-${{ github.sha }}
-          restore-keys: ${{ runner.os }}
+            $CONDA/pkgs
+          key: ${{ runner.os }}-${{ hashFiles('requirements/') }}-${{ env.ENV_CACHE_BUILD }}

       - name: Cache test data directory
         id: cache-test-data
@@ -62,16 +57,51 @@ jobs:
           unzip -q iris-test-data.zip
           mkdir --parents ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_LOC_PATH}
           mv iris-test-data-${IRIS_TEST_DATA_VERSION} ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}
-
+
       - name: Set test data var
         run: |
           echo "OVERRIDE_TEST_DATA_REPOSITORY=${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}/test_data" >> $GITHUB_ENV

-      - name: Run CI benchmarks
+      - name: Run overnight benchmarks
+        run: |
+          first_commit=$(git log --after="$(date -d "1 day ago" +"%Y-%m-%d") 23:00:00" --pretty=format:"%h" | tail -n 1)
+          if [ "$first_commit" != "" ]
+          then
+            nox --session="benchmarks(overnight)" -- $first_commit
+          fi
+
+      - name: Create issues for performance shifts
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          mkdir --parents benchmarks/.asv
-          set -o pipefail
-          nox --session="benchmarks(ci compare)" | tee benchmarks/.asv/ci_compare.txt
+          if [ -d benchmarks/.asv/performance-shifts ]
+          then
+            cd benchmarks/.asv/performance-shifts
+            for commit_file in *
+            do
+              pr_number=$(git log "$commit_file"^! --oneline | grep -o "#[0-9]*" | tail -1 | cut -c 2-)
+              assignee=$(gh pr view $pr_number --json author -q '.["author"]["login"]' --repo $GITHUB_REPOSITORY)
+              title="Performance Shift(s): \`$commit_file\`"
+              body="
+          Benchmark comparison has identified performance shifts at commit \
+          $commit_file (#$pr_number). Please review the report below and \
+          take corrective/congratulatory action as appropriate \
+          :slightly_smiling_face:
+
+          <details>
+          <summary>Performance shift report</summary>
+
+          \`\`\`
+          $(cat $commit_file)
+          \`\`\`
+
+          </details>
+
+          Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})
+          "
+              gh issue create --title "$title" --body "$body" --assignee $assignee --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY
+            done
+          fi

       - name: Archive asv results
         if: ${{ always() }}
@@ -80,4 +110,3 @@ jobs:
           name: asv-report
           path: |
             benchmarks/.asv/results
-            benchmarks/.asv/ci_compare.txt
diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml
old mode 100755
new mode 100644
index 643825b3668..96572fb815a
--- a/.github/workflows/refresh-lockfiles.yml
+++ b/.github/workflows/refresh-lockfiles.yml
@@ -22,7 +22,9 @@ on:
         default: "no"
   schedule:
     # Run once a week on a Saturday night
-    - cron: 1 0 * * 6
+    # N.B. "should" be quoted, according to
+    # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
+    - cron: "1 0 * * 6"


 jobs:
@@ -35,7 +37,7 @@ jobs:
     # the lockfile bot has made the head commit, abort the workflow.
     # This job can be manually overridden by running directly from the github actions panel
     # (known as a "workflow_dispatch") and setting the `clobber` input to "yes".
-    - uses: actions/github-script@v4
+    - uses: actions/github-script@v6
      with:
        github-token: ${{ secrets.GITHUB_TOKEN }}
        script: |
@@ -71,7 +73,7 @@ jobs:

     strategy:
       matrix:
-        python: ['37', '38']
+        python: ['38']

     steps:
     - uses: actions/checkout@v2
@@ -108,13 +110,25 @@ jobs:
         rm -r artifacts

     - name: Create Pull Request
-      uses: peter-evans/create-pull-request@052fc72b4198ba9fbc81b818c6e1859f747d49a8
+      id: cpr
+      uses: peter-evans/create-pull-request@18f7dc018cc2cd597073088f7c7591b9d1c02672
       with:
         commit-message: Updated environment lockfiles
         committer: "Lockfile bot <noreply@github.com>"
         author: "Lockfile bot <noreply@github.com>"
         delete-branch: true
         branch: auto-update-lockfiles
-        title: Update CI environment lockfiles
+        title: "[iris.ci] environment lockfiles auto-update"
         body: |
           Lockfiles updated to the latest resolvable environment.
+        labels: |
+          New: Pull Request
+          Bot
+
+    - name: Check Pull Request
+      if: steps.cpr.outputs.pull-request-number != ''
+      run: |
+        echo "pull-request #${{ steps.cpr.outputs.pull-request-number }}"
+        echo "pull-request URL ${{ steps.cpr.outputs.pull-request-url }}"
+        echo "pull-request operation [${{ steps.cpr.outputs.pull-request-operation }}]"
+        echo "pull-request head SHA ${{ steps.cpr.outputs.pull-request-head-sha }}"
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index a38a03637e8..a1bb0fca6cc 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -1,16 +1,20 @@
 # See https://github.com/actions/stale

 name: Stale issues and pull-requests
+
 on:
   schedule:
-    - cron: 0 0 * * *
+    # Run once a day
+    # N.B. "should" be quoted, according to
+    # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
+    - cron: "0 0 * * *"

 jobs:
   stale:
     if: "github.repository == 'SciTools/iris'"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v4.0.0
+      - uses: actions/stale@v4.1.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
@@ -59,11 +63,11 @@ jobs:
           stale-pr-label: Stale

           # Labels on issues exempted from stale.
-          exempt-issue-labels: |
+          exempt-issue-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue"

           # Labels on prs exempted from stale.
-          exempt-pr-labels: |
+          exempt-pr-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue"

           # Max number of operations per run.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 97dff666cfc..ee036038e45 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
       - id: no-commit-to-branch

 - repo: https://github.com/psf/black
-  rev: 21.12b0
+  rev: 22.1.0
   hooks:
     - id: black
       pass_filenames: false
@@ -50,14 +50,14 @@ repos:
       args: [--filter-files]

 - repo: https://github.com/asottile/blacken-docs
-  rev: v1.12.0
+  rev: v1.12.1
   hooks:
     - id: blacken-docs
       types: [file, rst]
       additional_dependencies: [black==21.6b0]

 - repo: https://github.com/aio-libs/sort-all
-  rev: v1.1.0
+  rev: v1.2.0
   hooks:
     - id: sort-all
       types: [file, python]
diff --git a/benchmarks/README.md b/benchmarks/README.md
new file mode 100644
index 00000000000..baa1afe7001
--- /dev/null
+++ b/benchmarks/README.md
@@ -0,0 +1,80 @@
+# Iris Performance Benchmarking
+
+Iris uses an [Airspeed Velocity](https://github.com/airspeed-velocity/asv)
+(ASV) setup to benchmark performance. This is primarily designed to check for
+performance shifts between commits using statistical analysis, but can also
+be easily repurposed for manual comparative and scalability analyses.
+
+The benchmarks are automatically run overnight
+[by a GitHub Action](../.github/workflows/benchmark.yml), with any notable
+shifts in performance being flagged in a new GitHub issue.
+
+## Running benchmarks
+
+`asv ...` commands must be run from this directory. You will need to have ASV
+installed, as well as Nox (see
+[Benchmark environments](#benchmark-environments)).
+
+[Iris' noxfile](../noxfile.py) includes a `benchmarks` session that provides
+conveniences for setting up before benchmarking, and can also replicate the
+automated overnight run locally. See the session docstring for detail.
+
+### Environment variables
+
+* ``DATA_GEN_PYTHON`` - required - path to a Python executable that can be
+used to generate benchmark test objects/files; see
+[Data generation](#data-generation). The Nox session sets this automatically,
+but will defer to any value already set in the shell.
+* ``BENCHMARK_DATA`` - optional - path to a directory for benchmark synthetic
+test data, which the benchmark scripts will create if it doesn't already
+exist. Defaults to ``<root>/benchmarks/.data/`` if not set.
+
+## Writing benchmarks
+
+[See the ASV docs](https://asv.readthedocs.io/) for full detail.
+
+### Data generation
+**Important:** be sure not to use the benchmarking environment to generate any
+test objects/files, as this environment changes with each commit being
+benchmarked, creating inconsistent benchmark 'conditions'. The
+[generate_data](./benchmarks/generate_data/__init__.py) module offers a
+solution; read more detail there.
+
+### ASV re-run behaviour
+
+Note that ASV re-runs a benchmark multiple times between calls to its
+`setup()` routine. This is a problem for benchmarking certain Iris operations
+such as data realisation, since the data will no longer be lazy after the
+first run. Consider writing extra steps to restore objects' original state
+_within_ the benchmark itself.
+
+If adding steps to the benchmark will skew the result too much then re-running
+can be disabled by setting an attribute on the benchmark: `number = 1`. To
+maintain result accuracy this should be accompanied by increasing the number of
+repeats _between_ `setup()` calls using the `repeat` attribute.
+`warmup_time = 0` is also advisable since ASV performs independent re-runs to
+estimate run-time, and these will still be subject to the original problem.
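+
+As a minimal sketch, a lazy-data benchmark using all three attributes might
+look like the following (the class and synthetic cube are illustrative only,
+not part of the Iris benchmarks):
+
+```python
+import dask.array as da
+from iris.cube import Cube
+
+
+class TimeRealiseData:
+    number = 1       # Time the body once per setup() call - data stays lazy.
+    repeat = 10      # Compensate with more setup()/benchmark cycles.
+    warmup_time = 0  # Skip warm-up runs, which would realise the data early.
+
+    def setup(self):
+        # A cube with lazy (dask) data, re-created before every repeat.
+        self.cube = Cube(da.zeros((1000, 1000), chunks=(100, 100)))
+
+    def time_realise(self):
+        self.cube.data  # Triggers realisation of the lazy data.
+```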
+
+### Scaling / non-Scaling Performance Differences
+
+When comparing performance between commits/file-type/whatever it can be helpful
+to know if the differences exist in scaling or non-scaling parts of the Iris
+functionality in question. This can be done using a size parameter, setting
+one value to be as small as possible (e.g. a scalar `Cube`), and the other to
+be significantly larger (e.g. a 1000x1000 `Cube`). Performance differences
+might only be seen for the larger value, or the smaller, or both, getting you
+closer to the root cause.
+
+## Benchmark environments
+
+We have disabled ASV's standard environment management, instead using an
+environment built using the same Nox scripts as Iris' test environments. This
+is done using ASV's plugin architecture - see
+[asv_delegated_conda.py](asv_delegated_conda.py) and the extra config items in
+[asv.conf.json](asv.conf.json).
+
+(ASV is written to control the environment(s) that benchmarks are run in -
+minimising external factors and also allowing it to compare between a matrix
+of dependencies (each in a separate environment). We have chosen to sacrifice
+these features in favour of testing each commit with its intended dependencies,
+controlled by Nox + lock-files).
diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json
index 9ea1cdb101d..3468b2fca99 100644
--- a/benchmarks/asv.conf.json
+++ b/benchmarks/asv.conf.json
@@ -3,18 +3,25 @@
     "project": "scitools-iris",
     "project_url": "https://github.com/SciTools/iris",
     "repo": "..",
-    "environment_type": "nox-conda",
+    "environment_type": "conda-delegated",
     "show_commit_url": "http://github.com/scitools/iris/commit/",
     "benchmark_dir": "./benchmarks",
     "env_dir": ".asv/env",
     "results_dir": ".asv/results",
     "html_dir": ".asv/html",
-    "plugins": [".nox_asv_plugin"],
-    // The commit to checkout to first run Nox to set up the environment.
-    "nox_setup_commit": "HEAD",
-    // The path of the noxfile's location relative to the project root.
-    "noxfile_rel_path": "noxfile.py",
-    // The ``--session`` arg to be used with ``--install-only`` to prep an environment.
-    "nox_session_name": "tests"
+    "plugins": [".asv_delegated_conda"],
+
+    // The command(s) that create/update an environment correctly for the
+    //  checked-out commit.
+    //  Interpreted the same as build_command, with following exceptions:
+    //    * No build-time environment variables.
+    //    * Is run in the same environment as the ASV install itself.
+    "delegated_env_commands": [
+        "sed -i 's/_PY_VERSIONS_ALL/_PY_VERSION_LATEST/g' noxfile.py",
+        "nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose"
+    ],
+    // The parent directory of the above environment.
+    //  The most recently modified environment in the directory will be used.
+    "delegated_env_parent": "{conf_dir}/.asv/env/nox01"
 }
diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py
new file mode 100644
index 00000000000..250a4e032d8
--- /dev/null
+++ b/benchmarks/asv_delegated_conda.py
@@ -0,0 +1,208 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda`
+subclass that manages the Conda environment via custom user scripts.
+ +""" + +from os import environ +from os.path import getmtime +from pathlib import Path +from shutil import copy2, copytree, rmtree +from tempfile import TemporaryDirectory + +from asv import util as asv_util +from asv.config import Config +from asv.console import log +from asv.plugins.conda import Conda +from asv.repo import Repo + + +class CondaDelegated(Conda): + """ + Manage a Conda environment using custom user scripts, run at each commit. + + Ignores user input variations - ``matrix`` / ``pythons`` / + ``conda_environment_file``, since environment is being managed outside ASV. + + Original environment creation behaviour is inherited, but upon checking out + a commit the custom script(s) are run and the original environment is + replaced with a symlink to the custom environment. This arrangement is then + re-used in subsequent runs. + + """ + + tool_name = "conda-delegated" + + def __init__( + self, + conf: Config, + python: str, + requirements: dict, + tagged_env_vars: dict, + ) -> None: + """ + Parameters + ---------- + conf : Config instance + + python : str + Version of Python. Must be of the form "MAJOR.MINOR". + + requirements : dict + Dictionary mapping a PyPI package name to a version + identifier string. + + tagged_env_vars : dict + Environment variables, tagged for build vs. non-build + + """ + ignored = ["`python`"] + if requirements: + ignored.append("`requirements`") + if tagged_env_vars: + ignored.append("`tagged_env_vars`") + if conf.conda_environment_file: + ignored.append("`conda_environment_file`") + message = ( + f"Ignoring ASV setting(s): {', '.join(ignored)}. Benchmark " + "environment management is delegated to third party script(s)." + ) + log.warning(message) + requirements = {} + tagged_env_vars = {} + conf.conda_environment_file = None + + super().__init__(conf, python, requirements, tagged_env_vars) + self._update_info() + + self._env_commands = self._interpolate_commands( + conf.delegated_env_commands + ) + # Again using _interpolate_commands to get env parent path - allows use + # of the same ASV env variables. + env_parent_interpolated = self._interpolate_commands( + conf.delegated_env_parent + ) + # Returns list of tuples, we just want the first. + env_parent_first = env_parent_interpolated[0] + # The 'command' is the first item in the returned tuple. + env_parent_string = " ".join(env_parent_first[0]) + self._delegated_env_parent = Path(env_parent_string).resolve() + + @property + def name(self): + """Get a name to uniquely identify this environment.""" + return asv_util.sanitize_filename(self.tool_name) + + def _update_info(self) -> None: + """Make sure class properties reflect the actual environment being used.""" + # Follow symlink if it has been created. + actual_path = Path(self._path).resolve() + self._path = str(actual_path) + + # Get custom environment's Python version if it exists yet. 
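+        # (Queried by running ``python -c`` with the delegated environment's
+        # own interpreter; the OSError below means that environment has not
+        # been built yet.)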
+ try: + get_version = ( + "from sys import version_info; " + "print(f'{version_info.major}.{version_info.minor}')" + ) + actual_python = self.run(["-c", get_version]) + self._python = actual_python + except OSError: + pass + + def _prep_env(self) -> None: + """Run the custom environment script(s) and switch to using that environment.""" + message = f"Running delegated environment management for: {self.name}" + log.info(message) + env_path = Path(self._path) + + def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: + """For copying between self._path and a temporary cache.""" + asv_files = list(src_parent.glob("asv*")) + # build_root_path.name usually == "project" . + asv_files += [src_parent / Path(self._build_root).name] + for src_path in asv_files: + dst_path = dst_parent / src_path.name + if not dst_path.exists(): + # Only caching in case the environment has been rebuilt. + # If the dst_path already exists: rebuilding hasn't + # happened. Also a non-issue when copying in the reverse + # direction because the cache dir is temporary. + if src_path.is_dir(): + func = copytree + else: + func = copy2 + func(src_path, dst_path) + + with TemporaryDirectory(prefix="delegated_asv_cache_") as asv_cache: + asv_cache_path = Path(asv_cache) + # Cache all of ASV's files as delegated command may remove and + # re-build the environment. + copy_asv_files(env_path.resolve(), asv_cache_path) + + # Adapt the build_dir to the cache location. + build_root_path = Path(self._build_root) + build_dir_original = build_root_path / self._repo_subdir + build_dir_subpath = build_dir_original.relative_to( + build_root_path.parent + ) + build_dir = asv_cache_path / build_dir_subpath + + # Run the script(s) for delegated environment creation/updating. + # (An adaptation of self._interpolate_and_run_commands). + for command, env, return_codes, cwd in self._env_commands: + local_envs = dict(environ) + local_envs.update(env) + if cwd is None: + cwd = str(build_dir) + _ = asv_util.check_output( + command, + timeout=self._install_timeout, + cwd=cwd, + env=local_envs, + valid_return_codes=return_codes, + ) + + # Replace the env that ASV created with a symlink to the env + # created/updated by the custom script. + delegated_env_path = sorted( + self._delegated_env_parent.glob("*"), + key=getmtime, + reverse=True, + )[0] + if env_path.resolve() != delegated_env_path: + try: + env_path.unlink(missing_ok=True) + except IsADirectoryError: + rmtree(env_path) + env_path.symlink_to( + delegated_env_path, target_is_directory=True + ) + + # Check that environment exists. + try: + env_path.resolve(strict=True) + except FileNotFoundError: + message = f"Path does not resolve to environment: {env_path}" + log.error(message) + raise RuntimeError(message) + + # Restore ASV's files from the cache (if necessary). + copy_asv_files(asv_cache_path, env_path.resolve()) + + # Record new environment information in properties. + self._update_info() + + def checkout_project(self, repo: Repo, commit_hash: str) -> None: + """Check out the working tree of the project at given commit hash.""" + super().checkout_project(repo, commit_hash) + self._prep_env() + log.info( + f"Environment {self.name} updated to spec at {commit_hash[:8]}" + ) diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 2e741c3da03..4a964a648d9 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -5,45 +5,4 @@ # licensing details. 
"""Common code for benchmarks.""" -import os -from pathlib import Path - -# Environment variable names -_ASVDIR_VARNAME = "ASV_DIR" # As set in nightly script "asv_nightly/asv.sh" -_DATADIR_VARNAME = "BENCHMARK_DATA" # For local runs - ARTIFICIAL_DIM_SIZE = int(10e3) # For all artificial cubes, coords etc. - -# Work out where the benchmark data dir is. -asv_dir = os.environ.get("ASV_DIR", None) -if asv_dir: - # For an overnight run, this comes from the 'ASV_DIR' setting. - benchmark_data_dir = Path(asv_dir) / "data" -else: - # For a local run, you set 'BENCHMARK_DATA'. - benchmark_data_dir = os.environ.get(_DATADIR_VARNAME, None) - if benchmark_data_dir is not None: - benchmark_data_dir = Path(benchmark_data_dir) - - -def testdata_path(*path_names): - """ - Return the path of a benchmark test data file. - - These are based from a test-data location dir, which is either - ${}/data (for overnight tests), or ${} for local testing. - - If neither of these were set, an error is raised. - - """.format( - _ASVDIR_VARNAME, _DATADIR_VARNAME - ) - if benchmark_data_dir is None: - msg = ( - "Benchmark data dir is not defined : " - 'Either "${}" or "${}" must be set.' - ) - raise (ValueError(msg.format(_ASVDIR_VARNAME, _DATADIR_VARNAME))) - path = benchmark_data_dir.joinpath(*path_names) - path = str(path) # Because Iris doesn't understand Path objects yet. - return path diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py new file mode 100644 index 00000000000..a56f2e46230 --- /dev/null +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -0,0 +1,94 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Scripts for generating supporting data for benchmarking. + +Data generated using Iris should use :func:`run_function_elsewhere`, which +means that data is generated using a fixed version of Iris and a fixed +environment, rather than those that get changed when the benchmarking run +checks out a new commit. + +Downstream use of data generated 'elsewhere' requires saving; usually in a +NetCDF file. Could also use pickling but there is a potential risk if the +benchmark sequence runs over two different Python versions. + +""" +from inspect import getsource +from os import environ +from pathlib import Path +from subprocess import CalledProcessError, check_output, run +from textwrap import dedent + +#: Python executable used by :func:`run_function_elsewhere`, set via env +#: variable of same name. Must be path of Python within an environment that +#: includes Iris (including dependencies and test modules) and Mule. +try: + DATA_GEN_PYTHON = environ["DATA_GEN_PYTHON"] + _ = check_output([DATA_GEN_PYTHON, "-c", "a = True"]) +except KeyError: + error = "Env variable DATA_GEN_PYTHON not defined." + raise KeyError(error) +except (CalledProcessError, FileNotFoundError, PermissionError): + error = ( + "Env variable DATA_GEN_PYTHON not a runnable python executable path." + ) + raise ValueError(error) + +# The default location of data files used in benchmarks. Used by CI. +default_data_dir = (Path(__file__).parents[2] / ".data").resolve() +# Optionally override the default data location with environment variable. 
+BENCHMARK_DATA = Path(environ.get("BENCHMARK_DATA", default_data_dir)) +if BENCHMARK_DATA == default_data_dir: + BENCHMARK_DATA.mkdir(exist_ok=True) +elif not BENCHMARK_DATA.is_dir(): + message = f"Not a directory: {BENCHMARK_DATA} ." + raise ValueError(message) + +# Manual flag to allow the rebuilding of synthetic data. +# False forces a benchmark run to re-make all the data files. +REUSE_DATA = True + + +def run_function_elsewhere(func_to_run, *args, **kwargs): + """ + Run a given function using the :const:`DATA_GEN_PYTHON` executable. + + This structure allows the function to be written natively. + + Parameters + ---------- + func_to_run : FunctionType + The function object to be run. + NOTE: the function must be completely self-contained, i.e. perform all + its own imports (within the target :const:`DATA_GEN_PYTHON` + environment). + *args : tuple, optional + Function call arguments. Must all be expressible as simple literals, + i.e. the ``repr`` must be a valid literal expression. + **kwargs: dict, optional + Function call keyword arguments. All values must be expressible as + simple literals (see ``*args``). + + Returns + ------- + str + The ``stdout`` from the run. + + """ + func_string = dedent(getsource(func_to_run)) + func_string = func_string.replace("@staticmethod\n", "") + func_call_term_strings = [repr(arg) for arg in args] + func_call_term_strings += [ + f"{name}={repr(val)}" for name, val in kwargs.items() + ] + func_call_string = ( + f"{func_to_run.__name__}(" + ",".join(func_call_term_strings) + ")" + ) + python_string = "\n".join([func_string, func_call_string]) + result = run( + [DATA_GEN_PYTHON, "-c", python_string], capture_output=True, check=True + ) + return result.stdout diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py new file mode 100644 index 00000000000..1037954f08f --- /dev/null +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -0,0 +1,215 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Generate FF, PP and NetCDF files based on a minimal synthetic FF file. + +NOTE: uses the Mule package, so depends on an environment with Mule installed. +""" + + +def _create_um_files( + len_x: int, len_y: int, len_z: int, len_t: int, compress, save_paths: dict +) -> None: + """ + Generate an FF object of given shape and compression, save to FF/PP/NetCDF. + + This is run externally + (:func:`benchmarks.generate_data.run_function_elsewhere`), so all imports + are self-contained and input parameters are simple types. + """ + from copy import deepcopy + from datetime import datetime + from tempfile import NamedTemporaryFile + + from mo_pack import compress_wgdos as mo_pack_compress + from mule import ArrayDataProvider, Field3, FieldsFile + from mule.pp import fields_to_pp_file + import numpy as np + + from iris import load_cube + from iris import save as save_cube + + def packing_patch(*compress_args, **compress_kwargs) -> bytes: + """ + Force conversion from returned :class:`memoryview` to :class:`bytes`. + + Downstream uses of :func:`mo_pack.compress_wgdos` were written + for the ``Python2`` behaviour, where the returned buffer had a + different ``__len__`` value to the current :class:`memoryview`. + Unable to fix directly in Mule, so monkey patching for now. 
+ """ + return mo_pack_compress(*compress_args, **compress_kwargs).tobytes() + + import mo_pack + + mo_pack.compress_wgdos = packing_patch + + ######## + + template = { + "fixed_length_header": {"dataset_type": 3, "grid_staggering": 3}, + "integer_constants": { + "num_p_levels": len_z, + "num_cols": len_x, + "num_rows": len_y, + }, + "real_constants": {}, + "level_dependent_constants": {"dims": (len_z + 1, None)}, + } + new_ff = FieldsFile.from_template(deepcopy(template)) + + data_array = np.arange(len_x * len_y).reshape(len_x, len_y) + array_provider = ArrayDataProvider(data_array) + + def add_field(level_: int, time_step_: int) -> None: + """ + Add a minimal field to the new :class:`~mule.FieldsFile`. + + Includes the minimum information to allow Mule saving and Iris + loading, as well as incrementation for vertical levels and time + steps to allow generation of z and t dimensions. + """ + new_field = Field3.empty() + # To correspond to the header-release 3 class used. + new_field.lbrel = 3 + # Mule uses the first element of the lookup to test for + # unpopulated fields (and skips them), so the first element should + # be set to something. The year will do. + new_field.raw[1] = datetime.now().year + + # Horizontal. + new_field.lbcode = 1 + new_field.lbnpt = len_x + new_field.lbrow = len_y + new_field.bdx = new_ff.real_constants.col_spacing + new_field.bdy = new_ff.real_constants.row_spacing + new_field.bzx = new_ff.real_constants.start_lon - 0.5 * new_field.bdx + new_field.bzy = new_ff.real_constants.start_lat - 0.5 * new_field.bdy + + # Hemisphere. + new_field.lbhem = 32 + # Processing. + new_field.lbproc = 0 + + # Vertical. + # Hybrid height values by simulating sequences similar to those in a + # theta file. + new_field.lbvc = 65 + if level_ == 0: + new_field.lblev = 9999 + else: + new_field.lblev = level_ + + level_1 = level_ + 1 + six_rec = 20 / 3 + three_rec = six_rec / 2 + + new_field.blev = level_1**2 * six_rec - six_rec + new_field.brsvd1 = ( + level_1**2 * six_rec + (six_rec * level_1) - three_rec + ) + + brsvd2_simulated = np.linspace(0.995, 0, len_z) + shift = min(len_z, 2) + bhrlev_simulated = np.concatenate( + [np.ones(shift), brsvd2_simulated[:-shift]] + ) + new_field.brsvd2 = brsvd2_simulated[level_] + new_field.bhrlev = bhrlev_simulated[level_] + + # Time. + new_field.lbtim = 11 + + new_field.lbyr = time_step_ + for attr_name in ["lbmon", "lbdat", "lbhr", "lbmin", "lbsec"]: + setattr(new_field, attr_name, 0) + + new_field.lbyrd = time_step_ + 1 + for attr_name in ["lbmond", "lbdatd", "lbhrd", "lbmind", "lbsecd"]: + setattr(new_field, attr_name, 0) + + # Data and packing. + new_field.lbuser1 = 1 + new_field.lbpack = int(compress) + new_field.bacc = 0 + new_field.bmdi = -1 + new_field.lbext = 0 + new_field.set_data_provider(array_provider) + + new_ff.fields.append(new_field) + + for time_step in range(len_t): + for level in range(len_z): + add_field(level, time_step + 1) + + ff_path = save_paths.get("FF", None) + pp_path = save_paths.get("PP", None) + nc_path = save_paths.get("NetCDF", None) + + if ff_path: + new_ff.to_file(ff_path) + if pp_path: + fields_to_pp_file(str(pp_path), new_ff.fields) + if nc_path: + temp_ff_path = None + # Need an Iris Cube from the FF content. + if ff_path: + # Use the existing file. + ff_cube = load_cube(ff_path) + else: + # Make a temporary file. 
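+                # (NamedTemporaryFile keeps the file on disk until the
+                # explicit close() below, after the cube has been loaded.)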
+ temp_ff_path = NamedTemporaryFile() + new_ff.to_file(temp_ff_path.name) + ff_cube = load_cube(temp_ff_path.name) + + save_cube(ff_cube, nc_path, zlib=compress) + if temp_ff_path: + temp_ff_path.close() + + +FILE_EXTENSIONS = {"FF": "", "PP": ".pp", "NetCDF": ".nc"} + + +def create_um_files( + len_x: int, + len_y: int, + len_z: int, + len_t: int, + compress: bool, + file_types: list, +) -> dict: + """ + Generate FF-based FF / PP / NetCDF files with specified shape and compression. + + All files representing a given shape are saved in a dedicated directory. A + dictionary of the saved paths is returned. + + If the required files exist, they are re-used, unless + :const:`benchmarks.REUSE_DATA` is ``False``. + """ + # Self contained imports to avoid linting confusion with _create_um_files(). + from . import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere + + save_name_sections = ["UM", len_x, len_y, len_z, len_t] + save_name = "_".join(str(section) for section in save_name_sections) + save_dir = BENCHMARK_DATA / save_name + if not save_dir.is_dir(): + save_dir.mkdir(parents=True) + + save_paths = {} + files_exist = True + for file_type in file_types: + file_ext = FILE_EXTENSIONS[file_type] + save_path = (save_dir / f"{compress}").with_suffix(file_ext) + files_exist = files_exist and save_path.is_file() + save_paths[file_type] = str(save_path) + + if not REUSE_DATA or not files_exist: + _ = run_function_elsewhere( + _create_um_files, len_x, len_y, len_z, len_t, compress, save_paths + ) + + return save_paths diff --git a/benchmarks/benchmarks/loading.py b/benchmarks/benchmarks/loading.py new file mode 100644 index 00000000000..4558c3b5cba --- /dev/null +++ b/benchmarks/benchmarks/loading.py @@ -0,0 +1,185 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmark tests. + +Where applicable benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. Size should be _just_ large + enough - don't want to bloat benchmark runtime. 
+ +""" + +from iris import AttributeConstraint, Constraint, load, load_cube +from iris.cube import Cube +from iris.fileformats.um import structured_um_loading + +from .generate_data import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere +from .generate_data.um_files import create_um_files + + +class LoadAndRealise: + params = [ + [(2, 2, 2), (1280, 960, 5)], + [False, True], + ["FF", "PP", "NetCDF"], + ] + param_names = ["xyz", "compressed", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[2] + file_path_dict = {} + for xyz in self.params[0]: + file_path_dict[xyz] = {} + x, y, z = xyz + for compress in self.params[1]: + file_path_dict[xyz][compress] = create_um_files( + x, y, z, 1, compress, file_type_args + ) + return file_path_dict + + def setup( + self, + file_path_dict: dict, + xyz: tuple, + compress: bool, + file_format: str, + ) -> None: + self.file_path = file_path_dict[xyz][compress][file_format] + self.cube = self.load() + + def load(self) -> Cube: + return load_cube(self.file_path) + + def time_load(self, _, __, ___, ____) -> None: + _ = self.load() + + def time_realise(self, _, __, ___, ____) -> None: + # Don't touch cube.data - permanent realisation plays badly with ASV's + # re-run strategy. + assert self.cube.has_lazy_data() + self.cube.core_data().compute() + + +class STASHConstraint: + # xyz sizes mimic LoadAndRealise to maximise file re-use. + params = [[(2, 2, 2), (1280, 960, 5)], ["FF", "PP"]] + param_names = ["xyz", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files( + x, y, z, 1, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, xyz: tuple, file_format: str + ) -> None: + self.file_path = file_path_dict[xyz][file_format] + + def time_stash_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, AttributeConstraint(STASH="m??s??i901")) + + +class TimeConstraint: + params = [[3, 20], ["FF", "PP", "NetCDF"]] + param_names = ["time_dim_len", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for time_dim_len in self.params[0]: + file_path_dict[time_dim_len] = create_um_files( + 20, 20, 5, time_dim_len, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, time_dim_len: int, file_format: str + ) -> None: + self.file_path = file_path_dict[time_dim_len][file_format] + self.time_constr = Constraint(time=lambda cell: cell.point.year < 3) + + def time_time_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, self.time_constr) + + +class ManyVars: + FILE_PATH = BENCHMARK_DATA / "many_var_file.nc" + + @staticmethod + def _create_file(save_path: str) -> None: + """Is run externally - everything must be self-contained.""" + import numpy as np + + from iris import save + from iris.coords import AuxCoord + from iris.cube import Cube + + data_len = 8 + data = np.arange(data_len) + cube = Cube(data, units="unknown") + extra_vars = 80 + names = ["coord_" + str(i) for i in range(extra_vars)] + for name in names: + coord = AuxCoord(data, long_name=name, units="unknown") + cube.add_aux_coord(coord, 0) + save(cube, save_path) + + def setup_cache(self) -> None: + if not REUSE_DATA or not self.FILE_PATH.is_file(): + # See :mod:`benchmarks.generate_data` docstring for full explanation. 
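+            # (The file is written by the fixed data-generation environment -
+            # see run_function_elsewhere - so it is identical whichever commit
+            # is being benchmarked.)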
+ _ = run_function_elsewhere( + self._create_file, + str(self.FILE_PATH), + ) + + def time_many_var_load(self) -> None: + _ = load(str(self.FILE_PATH)) + + +class StructuredFF: + """ + Test structured loading of a large-ish fieldsfile. + + Structured load of the larger size should show benefit over standard load, + avoiding the cost of merging. + """ + + params = [[(2, 2, 2), (1280, 960, 5)], [False, True]] + param_names = ["xyz", "structured_loading"] + + def setup_cache(self) -> dict: + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files(x, y, z, 1, False, ["FF"]) + return file_path_dict + + def setup(self, file_path_dict, xyz, structured_load): + self.file_path = file_path_dict[xyz]["FF"] + self.structured_load = structured_load + + def load(self): + """Load the whole file (in fact there is only 1 cube).""" + + def _load(): + _ = load(self.file_path) + + if self.structured_load: + with structured_um_loading(): + _load() + else: + _load() + + def time_structured_load(self, _, __, ___): + self.load() diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 45905abd2ff..24899776dc8 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -22,7 +22,7 @@ def setup(self): # Should generate 10 distinct contours, regardless of dim size. dim_size = int(ARTIFICIAL_DIM_SIZE / 5) repeat_number = int(dim_size / 10) - repeat_range = range(int((dim_size ** 2) / repeat_number)) + repeat_range = range(int((dim_size**2) / repeat_number)) data = np.repeat(repeat_range, repeat_number) data = data.reshape((dim_size,) * 2) diff --git a/benchmarks/nox_asv_plugin.py b/benchmarks/nox_asv_plugin.py deleted file mode 100644 index 6c9ce142721..00000000000 --- a/benchmarks/nox_asv_plugin.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -ASV plug-in providing an alternative ``Environment`` subclass, which uses Nox -for environment management. - -""" -from importlib.util import find_spec -from pathlib import Path -from shutil import copy2, copytree -from tempfile import TemporaryDirectory - -from asv import util as asv_util -from asv.config import Config -from asv.console import log -from asv.environment import get_env_name -from asv.plugins.conda import Conda, _find_conda -from asv.repo import Repo, get_repo - - -class NoxConda(Conda): - """ - Manage a Conda environment using Nox, updating environment at each commit. - - Defers environment management to the project's noxfile, which must be able - to create/update the benchmarking environment using ``nox --install-only``, - with the ``--session`` specified in ``asv.conf.json.nox_session_name``. - - Notes - ----- - If not all benchmarked commits support this use of Nox: the plugin will - need to be modified to prep the environment in other ways. - - """ - - tool_name = "nox-conda" - - @classmethod - def matches(cls, python: str) -> bool: - """Used by ASV to work out if this type of environment can be used.""" - result = find_spec("nox") is not None - if result: - result = super().matches(python) - - if result: - message = ( - f"NOTE: ASV env match check incomplete. Not possible to know " - f"if selected Nox session (asv.conf.json.nox_session_name) is " - f"compatible with ``--python={python}`` until project is " - f"checked out." 
- ) - log.warning(message) - - return result - - def __init__(self, conf: Config, python: str, requirements: dict) -> None: - """ - Parameters - ---------- - conf: Config instance - - python : str - Version of Python. Must be of the form "MAJOR.MINOR". - - requirements : dict - Dictionary mapping a PyPI package name to a version - identifier string. - - """ - from nox.sessions import _normalize_path - - # Need to checkout the project BEFORE the benchmark run - to access a noxfile. - self.project_temp_checkout = TemporaryDirectory( - prefix="nox_asv_checkout_" - ) - repo = get_repo(conf) - repo.checkout(self.project_temp_checkout.name, conf.nox_setup_commit) - self.noxfile_rel_path = conf.noxfile_rel_path - self.setup_noxfile = ( - Path(self.project_temp_checkout.name) / self.noxfile_rel_path - ) - self.nox_session_name = conf.nox_session_name - - # Some duplication of parent code - need these attributes BEFORE - # running inherited code. - self._python = python - self._requirements = requirements - self._env_dir = conf.env_dir - - # Prepare the actual environment path, to override self._path. - nox_envdir = str(Path(self._env_dir).absolute() / self.hashname) - nox_friendly_name = self._get_nox_session_name(python) - self._nox_path = Path(_normalize_path(nox_envdir, nox_friendly_name)) - - # For storing any extra conda requirements from asv.conf.json. - self._extra_reqs_path = self._nox_path / "asv-extra-reqs.yaml" - - super().__init__(conf, python, requirements) - - @property - def _path(self) -> str: - """ - Using a property to override getting and setting in parent classes - - unable to modify parent classes as this is a plugin. - - """ - return str(self._nox_path) - - @_path.setter - def _path(self, value) -> None: - """Enforce overriding of this variable by disabling modification.""" - pass - - @property - def name(self) -> str: - """Overridden to prevent inclusion of user input requirements.""" - return get_env_name(self.tool_name, self._python, {}) - - def _get_nox_session_name(self, python: str) -> str: - nox_cmd_substring = ( - f"--noxfile={self.setup_noxfile} " - f"--session={self.nox_session_name} " - f"--python={python}" - ) - - list_output = asv_util.check_output( - ["nox", "--list", *nox_cmd_substring.split(" ")], - display_error=False, - dots=False, - ) - list_output = list_output.split("\n") - list_matches = list(filter(lambda s: s.startswith("*"), list_output)) - matches_count = len(list_matches) - - if matches_count == 0: - message = f"No Nox sessions found for: {nox_cmd_substring} ." - log.error(message) - raise RuntimeError(message) - elif matches_count > 1: - message = ( - f"Ambiguous - >1 Nox session found for: {nox_cmd_substring} ." - ) - log.error(message) - raise RuntimeError(message) - else: - line = list_matches[0] - session_name = line.split(" ")[1] - assert isinstance(session_name, str) - return session_name - - def _nox_prep_env(self, setup: bool = False) -> None: - message = f"Running Nox environment update for: {self.name}" - log.info(message) - - build_root_path = Path(self._build_root) - env_path = Path(self._path) - - def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: - """For copying between self._path and a temporary cache.""" - asv_files = list(src_parent.glob("asv*")) - # build_root_path.name usually == "project" . - asv_files += [src_parent / build_root_path.name] - for src_path in asv_files: - dst_path = dst_parent / src_path.name - if not dst_path.exists(): - # Only cache-ing in case Nox has rebuilt the env @ - # self._path. 
If the dst_path already exists: rebuilding - # hasn't happened. Also a non-issue when copying in the - # reverse direction because the cache dir is temporary. - if src_path.is_dir(): - func = copytree - else: - func = copy2 - func(src_path, dst_path) - - with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache: - asv_cache_path = Path(asv_cache) - if setup: - noxfile = self.setup_noxfile - else: - # Cache all of ASV's files as Nox may remove and re-build the environment. - copy_asv_files(env_path, asv_cache_path) - # Get location of noxfile in cache. - noxfile_original = ( - build_root_path / self._repo_subdir / self.noxfile_rel_path - ) - noxfile_subpath = noxfile_original.relative_to( - build_root_path.parent - ) - noxfile = asv_cache_path / noxfile_subpath - - nox_cmd = [ - "nox", - f"--noxfile={noxfile}", - # Place the env in the ASV env directory, instead of the default. - f"--envdir={env_path.parent}", - f"--session={self.nox_session_name}", - f"--python={self._python}", - "--install-only", - "--no-error-on-external-run", - "--verbose", - ] - - _ = asv_util.check_output(nox_cmd) - if not env_path.is_dir(): - message = f"Expected Nox environment not found: {env_path}" - log.error(message) - raise RuntimeError(message) - - if not setup: - # Restore ASV's files from the cache (if necessary). - copy_asv_files(asv_cache_path, env_path) - - def _setup(self) -> None: - """Used for initial environment creation - mimics parent method where possible.""" - try: - self.conda = _find_conda() - except IOError as e: - raise asv_util.UserError(str(e)) - if find_spec("nox") is None: - raise asv_util.UserError("Module not found: nox") - - message = f"Creating Nox-Conda environment for {self.name} ." - log.info(message) - - try: - self._nox_prep_env(setup=True) - finally: - # No longer need the setup checkout now that the environment has been built. - self.project_temp_checkout.cleanup() - - conda_args, pip_args = self._get_requirements(self.conda) - if conda_args or pip_args: - message = ( - "Ignoring user input package requirements. Benchmark " - "environment management is exclusively performed by Nox." 
- ) - log.warning(message) - - def checkout_project(self, repo: Repo, commit_hash: str) -> None: - """Check out the working tree of the project at given commit hash.""" - super().checkout_project(repo, commit_hash) - self._nox_prep_env() - log.info( - f"Environment {self.name} updated to spec at {commit_hash[:8]}" - ) diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index c3c056eb4ac..b09040c64e9 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -30,7 +30,7 @@ def main(): # To illustrate the full range of barbs, scale the wind speed up to pretend # that a storm is passing over - magnitude = (uwind ** 2 + vwind ** 2) ** 0.5 + magnitude = (uwind**2 + vwind**2) ** 0.5 magnitude.convert_units("knot") max_speed = magnitude.collapsed( ("latitude", "longitude"), iris.analysis.MAX @@ -41,7 +41,7 @@ def main(): vwind = vwind / max_speed * max_desired # Create a cube containing the wind speed - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") windspeed.convert_units("knot") diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index fd03f542057..40d9d0da002 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -27,7 +27,7 @@ def main(): vwind = iris.load_cube(infile, "y_wind") # Create a cube containing the wind speed. - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") # Plot the wind speed as a contour plot. diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index 0b0240b633e..3d62347f493 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -37,6 +37,7 @@ .. _test-iris-imagehash: https://github.com/SciTools/test-iris-imagehash .. _using git: https://docs.github.com/en/github/using-git .. _requirements/ci/: https://github.com/SciTools/iris/tree/main/requirements/ci +.. _CF-UGRID: https://ugrid-conventions.github.io/ugrid-conventions/ .. _voteable issues on GitHub: https://github.com/SciTools/iris/issues?q=is%3Aopen+is%3Aissue+label%3A%22Feature%3A+Voteable%22+sort%3Areactions-%2B1-desc diff --git a/docs/src/conf.py b/docs/src/conf.py index 287794368a1..4a5d866c2f3 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -200,7 +200,9 @@ def _dotv(version): # -- copybutton extension ----------------------------------------------------- # See https://sphinx-copybutton.readthedocs.io/en/latest/ -copybutton_prompt_text = ">>> " +copybutton_prompt_text = r">>> |\.\.\. " +copybutton_prompt_is_regexp = True +copybutton_line_continuation_character = "\\" # sphinx.ext.todo configuration ----------------------------------------------- # See https://www.sphinx-doc.org/en/master/usage/extensions/todo.html @@ -209,6 +211,7 @@ def _dotv(version): # api generation configuration autodoc_member_order = "groupwise" autodoc_default_flags = ["show-inheritance"] +autodoc_typehints = "none" autosummary_generate = True autosummary_imported_members = True autopackage_name = ["iris"] @@ -324,6 +327,7 @@ def _dotv(version): "https://software.ac.uk/how-cite-software", "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", "http://www.nationalarchives.gov.uk/doc/open-government-licence", + "https://www.metoffice.gov.uk/", ] # list of sources to exclude from the build. 
@@ -339,8 +343,10 @@ def _dotv(version): "gallery_dirs": ["generated/gallery"], # filename pattern for the files in the gallery "filename_pattern": "/plot_", - # filename patternt to ignore in the gallery + # filename pattern to ignore in the gallery "ignore_pattern": r"__init__\.py", + # force gallery building, unless overridden (see src/Makefile) + "plot_gallery": "'True'", } # ----------------------------------------------------------------------------- diff --git a/docs/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst index ebb553024bc..576fc5f6a68 100644 --- a/docs/src/developers_guide/documenting/whats_new_contributions.rst +++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst @@ -4,16 +4,21 @@ Contributing a "What's New" Entry ================================= -Iris uses a file named ``latest.rst`` to keep a draft of upcoming changes -that will form the next release. Contributions to the :ref:`iris_whatsnew` +Iris uses a file named ``dev.rst`` to keep a draft of upcoming development changes +that will form the next stable release. Contributions to the :ref:`iris_whatsnew` document are written by the developer most familiar with the change made. The contribution should be included as part of the Iris Pull Request that introduces the change. -The ``latest.rst`` and the past release notes are kept in -``docs/src/whatsnew/``. If you are writing the first contribution after -an Iris release: **create the new** ``latest.rst`` by copying the content from -``latest.rst.template`` in the same directory. +The ``dev.rst`` and the past release notes are kept in the +``docs/src/whatsnew/`` directory. If you are writing the first contribution after +an Iris release: **create the new** ``dev.rst`` by copying the content from +``dev.rst.template`` in the same directory. + +.. note:: + + Ensure that the symbolic link ``latest.rst`` references the ``dev.rst`` file + within the ``docs/src/whatsnew`` directory. Since the `Contribution categories`_ include Internal changes, **all** Iris Pull Requests should be accompanied by a "What's New" contribution. @@ -22,7 +27,7 @@ Pull Requests should be accompanied by a "What's New" contribution. Git Conflicts ============= -If changes to ``latest.rst`` are being suggested in several simultaneous +If changes to ``dev.rst`` are being suggested in several simultaneous Iris Pull Requests, Git will likely encounter merge conflicts. If this situation is thought likely (large PR, high repo activity etc.): @@ -43,7 +48,7 @@ situation is thought likely (large PR, high repo activity etc.): * PR reviewer: review the "What's New" PR, merge once acceptable -These measures should mean the suggested ``latest.rst`` changes are outstanding +These measures should mean the suggested ``dev.rst`` changes are outstanding for the minimum time, minimising conflicts and minimising the need to rebase or merge from trunk. diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index bcf075e4ae7..f4d44781fc6 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -36,6 +36,14 @@ Ensure that any behaviour which has been deprecated for the correct number of previous releases is now finally changed. More detail, including the correct number of releases, is in :ref:`iris_development_deprecations`. 
+Standard Names +~~~~~~~~~~~~~~ + +Update the file ``etc/cf-standard-name-table.xml`` to the latest CF standard names, +from the `latest CF standard names`_. +( This is used during build to automatically generate the sourcefile +``lib/iris/std_names.py``. ) + Release Branch -------------- @@ -175,9 +183,9 @@ back onto the ``SciTools/iris`` ``main`` branch. To achieve this, first cut a local branch from the latest ``main`` branch, and `git merge` the :literal:`.x` release branch into it. Ensure that the -``iris.__version__``, ``docs/src/whatsnew/index.rst`` and ``docs/src/whatsnew/latest.rst`` -are correct, before committing these changes and then proposing a pull-request -on the ``main`` branch of ``SciTools/iris``. +``iris.__version__``, ``docs/src/whatsnew/index.rst``, ``docs/src/whatsnew/dev.rst``, +and ``docs/src/whatsnew/latest.rst`` are correct, before committing these changes +and then proposing a pull-request on the ``main`` branch of ``SciTools/iris``. Point Releases @@ -210,9 +218,11 @@ Release Steps #. Update the ``iris.__init__.py`` version string e.g., to ``1.9.0`` #. Update the ``whatsnew`` for the release: - * Use ``git`` to rename ``docs/src/whatsnew/latest.rst`` to the release + * Use ``git`` to rename ``docs/src/whatsnew/dev.rst`` to the release version file ``v1.9.rst`` - * Use ``git`` to delete the ``docs/src/whatsnew/latest.rst.template`` file + * Update the symbolic link ``latest.rst`` to reference the latest + whatsnew ``v1.9.rst`` + * Use ``git`` to delete the ``docs/src/whatsnew/dev.rst.template`` file * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title. Note that, the Iris version and release date are updated automatically when the documentation is built @@ -221,11 +231,11 @@ Release Steps dropdown at the top of the file, which provides extra detail on notable changes * Use ``git`` to add and commit all changes, including removal of - ``latest.rst.template`` + ``dev.rst.template`` and update to the ``latest.rst`` symbolic link. #. Update the ``whatsnew`` index ``docs/src/whatsnew/index.rst`` - * Remove the reference to ``latest.rst`` + * Remove the reference to ``dev.rst`` * Add a reference to ``v1.9.rst`` to the top of the list #. Check your changes by building the documentation and reviewing @@ -246,13 +256,6 @@ Post Release Steps `Read The Docs`_ to ensure that the appropriate versions are ``Active`` and/or ``Hidden``. To do this ``Edit`` the appropriate version e.g., see `Editing v3.0.0rc0`_ (must be logged into Read the Docs). -#. Copy ``docs/src/whatsnew/latest.rst.template`` to - ``docs/src/whatsnew/latest.rst``. This will reset - the file with the ``unreleased`` heading and placeholders for the - ``whatsnew`` headings -#. Add back in the reference to ``latest.rst`` to the ``whatsnew`` index - ``docs/src/whatsnew/index.rst`` -#. Update ``iris.__init__.py`` version string to show as ``1.10.dev0`` #. Merge back to ``main`` @@ -268,3 +271,4 @@ Post Release Steps .. _rc_iris: https://anaconda.org/conda-forge/iris/labels .. _Generating Distribution Archives: https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives .. _Packaging Your Project: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project +.. 
_latest CF standard names: http://cfconventions.org/standard-names.html \ No newline at end of file diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst index dc162d6a1e2..81bff2f7641 100644 --- a/docs/src/further_topics/index.rst +++ b/docs/src/further_topics/index.rst @@ -13,14 +13,14 @@ that may be of interest to the more advanced or curious user. .. hint:: If you wish further documentation on any specific topics or areas of Iris - that are missing, then please let us know by raising a `GitHub Documentation Issue`_ + that are missing, then please let us know by raising a :issue:`GitHub Documentation Issue` on `SciTools/Iris`_. * :doc:`metadata` * :doc:`lenient_metadata` * :doc:`lenient_maths` +* :ref:`ugrid` -.. _GitHub Documentation Issue: https://github.com/SciTools/iris/issues/new?assignees=&labels=New%3A+Documentation%2C+Type%3A+Documentation&template=documentation.md&title= .. _SciTools/iris: https://github.com/SciTools/iris diff --git a/docs/src/further_topics/lenient_maths.rst b/docs/src/further_topics/lenient_maths.rst index 643bd37e76b..818efe47632 100644 --- a/docs/src/further_topics/lenient_maths.rst +++ b/docs/src/further_topics/lenient_maths.rst @@ -84,10 +84,10 @@ represents the output of an low-resolution global atmospheric ``experiment``, forecast_reference_time 2009-09-09 17:10:00 time 2009-09-09 17:10:00 Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i004 - experiment-id RT3 50 - source Data from Met Office Unified Model 7.04 + experiment-id 'RT3 50' + source 'Data from Met Office Unified Model 7.04' Consider also the following :class:`~iris.cube.Cube`, which has the same global spatial extent, and acts as a ``control``, @@ -103,9 +103,9 @@ spatial extent, and acts as a ``control``, model_level_number 1 time 2009-09-09 17:10:00 Attributes: - Conventions CF-1.7 + Conventions 'CF-1.7' STASH m01s00i004 - source Data from Met Office Unified Model 7.04 + source 'Data from Met Office Unified Model 7.04' Now let's subtract these cubes in order to calculate a simple ``difference``, @@ -129,8 +129,8 @@ Now let's subtract these cubes in order to calculate a simple ``difference``, forecast_reference_time 2009-09-09 17:10:00 time 2009-09-09 17:10:00 Attributes: - experiment-id RT3 50 - source Data from Met Office Unified Model 7.04 + experiment-id 'RT3 50' + source 'Data from Met Office Unified Model 7.04' Note that, cube maths automatically takes care of broadcasting the dimensionality of the ``control`` up to that of the ``experiment``, in order to @@ -218,7 +218,7 @@ time perform **strict** cube maths instead, Scalar coordinates: time 2009-09-09 17:10:00 Attributes: - source Data from Met Office Unified Model 7.04 + source 'Data from Met Office Unified Model 7.04' Although the numerical result of this strict cube maths operation is identical, it is not as rich in metadata as the :ref:`lenient alternative `. diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 79e9c164a0d..1b81f7055c2 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -38,8 +38,8 @@ Collectively, the aforementioned classes will be known here as the Iris .. 
hint:: If there are any `CF Conventions`_ metadata missing from Iris that you - care about, then please let us know by raising a `GitHub Issue`_ on - `SciTools/iris`_ + care about, then please let us know by raising a :issue:`GitHub Issue` + on `SciTools/iris`_ Common Metadata @@ -120,10 +120,10 @@ For example, given the following :class:`~iris.cube.Cube`, Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' We can easily get all of the associated metadata of the :class:`~iris.cube.Cube` using the ``metadata`` property: @@ -990,7 +990,6 @@ values. All other metadata members will be left unaltered. .. _CF Conventions: https://cfconventions.org/ .. _Cell Measures: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#cell-measures .. _Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags -.. _GitHub Issue: https://github.com/SciTools/iris/issues/new/choose .. _mapping: https://docs.python.org/3/glossary.html#term-mapping .. _namedtuple: https://docs.python.org/3/library/collections.html#collections.namedtuple .. _namedtuple._make: https://docs.python.org/3/library/collections.html#collections.somenamedtuple._make diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst new file mode 100644 index 00000000000..4a2f64f6279 --- /dev/null +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -0,0 +1,566 @@ +.. include:: ../../common_links.inc + +.. _ugrid model: + +The Mesh Data Model +******************* + +.. important:: + + This page is intended to summarise the essentials that Iris users need + to know about meshes. For exhaustive details on UGRID itself: + `visit the official UGRID conventions site`__. + +Evolution, not revolution +========================= +Mesh support has been designed wherever possible to fit within the existing +Iris model. Meshes concern only the spatial geography of data, and can +optionally be limited to just the horizontal geography (e.g. X and Y). Other +dimensions such as time or ensemble member (and often vertical levels) +retain their familiar structured format. + +The UGRID conventions themselves are designed as an addition to the existing CF +conventions, which are at the core of Iris' philosophy. + +What's Different? +================= + +The mesh format represents data's geography using an **unstructured +mesh**. This has significant pros and cons when compared to a structured grid. + +.. contents:: + :local: + +The Detail +---------- +.. + The diagram images are SVG's, so editable by any graphical software + (e.g. Inkscape). They were originally made in MS PowerPoint. + + Uses the IBM Colour Blind Palette (see + http://ibm-design-language.eu-de.mybluemix.net/design/language/resources/color-library + ) + +Structured Grids (the old world) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Assigning data to locations using a structured grid is essentially an act of +matching coordinate arrays to each dimension of the data array. The data can +also be represented as an area (instead of a point) by including a bounds array +for each coordinate array. :numref:`data_structured_grid` visualises an +example. + +.. _data_structured_grid: +.. 
figure:: images/data_structured_grid.svg + :alt: Diagram of how data is represented on a structured grid + :align: right + :width: 1280 + + Data on a structured grid. + + 1D coordinate arrays (pink circles) are combined to construct a structured + grid of points (pink crosses). 2D bounds arrays (blue circles) can also be + used to describe the 1D boundaries (blue lines) at either side of each + rank of points; each point therefore having four bounds (x+y, upper+lower), + together describing a quadrilateral area around that point. Data from the + 2D data array (orange circles) can be assigned to these point locations + (orange diamonds) or area locations (orange quads) by matching the relative + positions in the data array to the relative spatial positions - see the + black outlined shapes as examples of this in action. + +Unstructured Meshes (the new world) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +A mesh is made up of different types of **element**: + +.. list-table:: + :widths: 15, 15, 70 + + * - 0D + - ``node`` + - The 'core' of the mesh. A point position in space, constructed from + 2 or 3 coordinates (2D or 3D space). + * - 1D + - ``edge`` + - Constructed by connecting 2 nodes. + * - 2D + - ``face`` + - Constructed by connecting 3 or more nodes. + * - 3D + - ``volume`` + - Constructed by connecting 4 or more nodes (which must each have 3 + coordinates - 3D space). + +Every node in the mesh is defined by indexing the 1-dimensional X and Y (and +optionally Z) coordinate arrays (the ``node_coordinates``) - e.g. +``(x[3], y[3])`` gives the position of the fourth node. Note that this means +each node has its own coordinates, independent of every other node. + +Any higher dimensional element - an edge/face/volume - is described by a +sequence of the indices of the nodes that make up that element. E.g. a +triangular face made from connecting the first, third and fourth nodes: +``[0, 2, 3]``. These 1D sequences combine into a 2D array enumerating **all** +the elements of that type - edge/face/volume - called a **connectivity**. +E.g. we could make a mesh of 4 nodes, with 2 triangles described using this +``face_node_connectivity``: ``[[0, 2, 3], [3, 2, 1]]`` (note the shared nodes). + +.. note:: More on Connectivities: + + * The element type described by a connectivity is known as its + **location**; ``edge`` in ``edge_node_connectivity``. + * According to the UGRID conventions, the nodes in a face should be + listed in "anti-clockwise order from above". + * Connectivities also exist to connect the higher dimensional elements, + e.g. ``face_edge_connectivity``. These are optional conveniences to + speed up certain operations and will not be discussed here. + +.. important:: + + **Meshes are unstructured**. The mesh elements - represented in the + coordinate and connectivity arrays detailed above - are enumerated + along a single **unstructured dimension**. An element's position along + this dimension has nothing to do with its spatial position. + +A data variable associated with a mesh has a **location** of either ``node``, +``edge``, ``face`` or ``volume``. The data is stored in a 1D array with one +datum per element, matched to its element by matching the datum index with the +coordinate or connectivity index along the **unstructured dimension**. So for +an example data array called ``foo``: +``foo[3]`` would be at position ``(x[3], y[3])`` if it were node-located, or at +``faces[3]`` if it were face-located. :numref:`data_ugrid_mesh` visualises an +example of what is described above. + +.. 
_data_ugrid_mesh:
+.. figure:: images/data_ugrid_mesh.svg
+    :alt: Diagram of how data is represented on an unstructured mesh
+    :align: right
+    :width: 1280
+
+    Data on an unstructured mesh
+
+    1D coordinate arrays (pink circles) describe node positions in space (pink
+    crosses). A 2D connectivity array (blue circles) describes faces by
+    connecting four nodes - by referencing their indices - into a face outline
+    (blue outlines on the map). Data from the 1D data array (orange circles)
+    can be assigned to these node locations (orange diamonds) or face locations
+    (orange quads) by matching the indices in the data array to the indices in
+    the coordinate arrays (for nodes) or connectivity array (for faces). See
+    the black outlined shapes as examples of index matching in action, and the
+    black stippled shapes to demonstrate that relative array position confers
+    no relative spatial information.
+
+----
+
+The mesh model also supports edges/faces/volumes having associated 'centre'
+coordinates - to allow point data to be assigned to these elements. 'Centre' is
+just a convenience term - the points can exist anywhere within their respective
+elements. See :numref:`ugrid_element_centres` for a visualised example.
+
+.. _ugrid_element_centres:
+.. figure:: images/ugrid_element_centres.svg
+    :alt: Diagram demonstrating mesh face-centred data.
+    :align: right
+    :width: 1280
+
+    Data can be assigned to mesh edge/face/volume 'centres'
+
+    1D *node* coordinate arrays (pink circles) describe node positions in
+    space (pink crosses). A 2D connectivity array (blue circles) describes
+    faces by connecting four nodes into a face outline (blue outlines on the
+    map). Further 1D *face* coordinate arrays (pink circles) describe a
+    'centre' point position (pink stars) for each face enumerated in the
+    connectivity array.
+
+Mesh Flexibility
+++++++++++++++++
+Above we have seen how one could replicate data on a structured grid using
+a mesh instead. But the utility of a mesh is the extra flexibility it offers.
+Here are the main examples:
+
+Every node is completely independent - every one can have unique X and Y
+(and Z) coordinate values. See :numref:`ugrid_node_independence`.
+
+.. _ugrid_node_independence:
+.. figure:: images/ugrid_node_independence.svg
+    :alt: Diagram demonstrating the independence of each mesh node
+    :align: right
+    :width: 300
+
+    Every mesh node is completely independent
+
+    The same array shape and structure used to describe the node positions
+    (pink crosses) in a regular grid (left-hand maps) is equally able to
+    describe **any** position for these nodes (e.g. the right-hand maps),
+    simply by changing the array values. The quadrilateral faces (blue
+    outlines) can therefore be given any quadrilateral shape by re-positioning
+    their constituent nodes.
+
+Faces and volumes can have variable node counts, i.e. different numbers of
+sides. This is achieved by masking the unused 'slots' in the connectivity
+array. See :numref:`ugrid_variable_faces`, and the sketch that follows it.
+
+.. _ugrid_variable_faces:
+.. figure:: images/ugrid_variable_faces.svg
+    :alt: Diagram demonstrating mesh faces with variable node counts
+    :align: right
+    :width: 300
+
+    Mesh faces can have different node counts (using masking)
+
+    The 2D connectivity array (blue circles) describes faces by connecting
+    nodes (pink crosses) to make up a face (blue outlines). The faces can use
+    different numbers of nodes by shaping the connectivity array to accommodate
+    the face with the most nodes, then masking unused node 'slots'
+    (black circles) for faces with fewer nodes than the maximum.
+
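+A minimal sketch of such a masked connectivity, using plain NumPy (this is the
+same array that reappears in the worked Iris examples elsewhere in these
+pages):
+
+.. code-block:: python
+
+    import numpy as np
+
+    # Two faces: a quadrilateral [0, 1, 3, 2] and a triangle [1, 4, 3].
+    # The triangle's unused fourth 'slot' holds a placeholder value (999),
+    # which is then masked, letting faces with different node counts share
+    # one rectangular array.
+    face_node_connectivity = np.ma.masked_equal(
+        [[0, 1, 3, 2], [1, 4, 3, 999]], 999
+    )
+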
+Data can be assigned to lines (edges) just as easily as points (nodes) or
+areas (faces). See :numref:`ugrid_edge_data`.
+
+.. _ugrid_edge_data:
+.. figure:: images/ugrid_edge_data.svg
+    :alt: Diagram demonstrating data assigned to mesh edges
+    :align: right
+    :width: 300
+
+    Data can be assigned to mesh edges
+
+    The 2D connectivity array (blue circles) describes edges by connecting 2
+    nodes (pink crosses) to make up an edge (blue lines). Data can be assigned
+    to the edges (orange lines) by matching the indices of the 1D data array
+    (not shown) to the indices in the connectivity array.
+
+.. _ugrid implications:
+
+What does this mean?
+--------------------
+Meshes can represent much more varied spatial arrangements
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The highly specific way of recording position (geometry) and shape
+(topology) allows meshes to represent essentially **any** spatial arrangement
+of data. There are therefore many new applications that aren't possible using a
+structured grid, including:
+
+* `The UK Met Office's LFRic cubed-sphere `_
+* `Oceanic model outputs `_
+
+.. todo:
+    a third example!
+
+Mesh 'payload' is much larger than with structured grids
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Coordinates are recorded per-node, and connectivities are recorded per-element.
+This is opposed to a structured grid, where a single coordinate value is shared
+by every data point/area along that line.
+
+For example: representing the surface of a cubed-sphere using a mesh leads to
+coordinates and connectivities being **~8 times larger than the data itself**,
+as opposed to a small fraction of the data size when dividing a spherical
+surface using a structured grid of longitudes and latitudes.
+
+This further increases the emphasis on lazy loading and processing of data
+using packages such as Dask.
+
+.. note::
+
+    The large, 1D data arrays associated with meshes are a very different
+    shape to what Iris users and developers are used to. It is suspected
+    that optimal performance will need new chunking strategies, but at time
+    of writing (``Jan 2022``) experience is still limited.
+
+.. todo:
+    Revisit when we have more information.
+
+Spatial operations on mesh data are more complex
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Detail: :doc:`operations`
+
+Indexing a mesh data array cannot be used for:
+
+#. Region selection
+#. Neighbour identification
+
+This is because - unlike with a structured data array - relative position in
+a mesh's 1-dimensional data arrays has no relation to relative position in
+space. We must instead perform specialised operations using the information in
+the mesh's connectivities, or translate the mesh into a format designed
+for mesh analysis, such as VTK.
+
+Such calculations can still be optimised to avoid them slowing down workflows,
+but the important take-away here is that **adaptation is needed when working
+with mesh data**.
+
+
+How Iris Represents This
+========================
+
+..
+    Include API links to the various classes
+
+    Include Cube/Mesh printout(s)
+
+.. seealso::
+
+    Remember this is a prose summary. Precise documentation is at:
+    :mod:`iris.experimental.ugrid`.
+
+.. 
note:: + + At time of writing (``Jan 2022``), neither 3D meshes nor 3D elements + (volumes) are supported. + +The Basics +---------- +The Iris :class:`~iris.cube.Cube` has several new members: + +* | :attr:`~iris.cube.Cube.mesh` + | The :class:`iris.experimental.ugrid.Mesh` that describes the + :class:`~iris.cube.Cube`\'s horizontal geography. +* | :attr:`~iris.cube.Cube.location` + | ``node``/``edge``/``face`` - the mesh element type with which this + :class:`~iris.cube.Cube`\'s :attr:`~iris.cube.Cube.data` is associated. +* | :meth:`~iris.cube.Cube.mesh_dim` + | The :class:`~iris.cube.Cube`\'s **unstructured dimension** - the one that + indexes over the horizontal :attr:`~iris.cube.Cube.data` positions. + +These members will all be ``None`` for a :class:`~iris.cube.Cube` with no +associated :class:`~iris.experimental.ugrid.Mesh`. + +This :class:`~iris.cube.Cube`\'s unstructured dimension has multiple attached +:class:`iris.experimental.ugrid.MeshCoord`\s (one for each axis e.g. +``x``/``y``), which can be used to infer the points and bounds of any index on +the :class:`~iris.cube.Cube`\'s unstructured dimension. + +.. testsetup:: ugrid_summaries + + import numpy as np + + from iris.coords import AuxCoord, DimCoord + from iris.cube import Cube + from iris.experimental.ugrid import Connectivity, Mesh + + node_x = AuxCoord( + points=[0.0, 5.0, 0.0, 5.0, 8.0], + standard_name="longitude", + units="degrees_east", + ) + node_y = AuxCoord( + points=[3.0, 3.0, 0.0, 0.0, 0.0], + standard_name="latitude", + units="degrees_north", + ) + + edge_node_c = Connectivity( + indices=[[0, 1], [0, 2], [1, 3], [1, 4], [2, 3], [3, 4]], + cf_role="edge_node_connectivity", + ) + + face_indices = np.ma.masked_equal([[0, 1, 3, 2], [1, 4, 3, 999]], 999) + face_node_c = Connectivity( + indices=face_indices, cf_role="face_node_connectivity" + ) + + def centre_coords(conn): + indexing = np.ma.filled(conn.indices, 0) + x, y = [ + AuxCoord( + node_coord.points[indexing].mean(axis=conn.connected_axis), + node_coord.standard_name, + units=node_coord.units, + ) + for node_coord in (node_x, node_y) + ] + return [(x, "x"), (y, "y")] + + my_mesh = Mesh( + long_name="my_mesh", + topology_dimension=2, + node_coords_and_axes=[(node_x, "x"), (node_y, "y")], + connectivities=[edge_node_c, face_node_c], + edge_coords_and_axes=centre_coords(edge_node_c), + face_coords_and_axes=centre_coords(face_node_c), + ) + + vertical_levels = DimCoord([0, 1, 2], "height") + + def location_cube(conn): + location = conn.location + mesh_coord_x, mesh_coord_y = my_mesh.to_MeshCoords(location) + data_shape = (conn.shape[conn.location_axis], len(vertical_levels.points)) + data_array = np.arange(np.prod(data_shape)).reshape(data_shape) + + return Cube( + data=data_array, + long_name=f"{location}_data", + units="K", + dim_coords_and_dims=[(vertical_levels, 1)], + aux_coords_and_dims=[(mesh_coord_x, 0), (mesh_coord_y, 0)], + ) + + edge_cube = location_cube(edge_node_c) + face_cube = location_cube(face_node_c) + +.. doctest:: ugrid_summaries + + >>> print(edge_cube) + edge_data / (K) (-- : 6; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + + >>> print(edge_cube.location) + edge + + >>> print(edge_cube.mesh_dim()) + 0 + + >>> print(edge_cube.mesh.summary(shorten=True)) + + +The Detail +---------- +How UGRID information is stored +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +* | :class:`iris.experimental.ugrid.Mesh` + | Contains all information about the mesh. 
+ | Includes: + + * | :attr:`~iris.experimental.ugrid.Mesh.topology_dimension` + | The maximum dimensionality of shape (1D=edge, 2D=face) supported + by this :class:`~iris.experimental.ugrid.Mesh`. Determines which + :class:`~iris.experimental.ugrid.Connectivity`\s are required/optional + (see below). + + * 1-3 collections of :class:`iris.coords.AuxCoord`\s: + + * | **Required**: :attr:`~iris.experimental.ugrid.Mesh.node_coords` + | The nodes that are the basis for the mesh. + * | Optional: :attr:`~iris.experimental.ugrid.Mesh.edge_coords`, + :attr:`~iris.experimental.ugrid.Mesh.face_coords` + | For indicating the 'centres' of the edges/faces. + | **NOTE:** generating a :class:`~iris.experimental.ugrid.MeshCoord` from + a :class:`~iris.experimental.ugrid.Mesh` currently (``Jan 2022``) + requires centre coordinates for the given ``location``; to be rectified + in future. + + * 1 or more :class:`iris.experimental.ugrid.Connectivity`\s: + + * | **Required for 1D (edge) elements**: + :attr:`~iris.experimental.ugrid.Mesh.edge_node_connectivity` + | Define the edges by connecting nodes. + * | **Required for 2D (face) elements**: + :attr:`~iris.experimental.ugrid.Mesh.face_node_connectivity` + | Define the faces by connecting nodes. + * Optional: any other connectivity type. See + :attr:`iris.experimental.ugrid.mesh.Connectivity.UGRID_CF_ROLES` for the + full list of types. + +.. doctest:: ugrid_summaries + + >>> print(edge_cube.mesh) + Mesh : 'my_mesh' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + edge + edge_dimension: 'Mesh2d_edge' + edge_node_connectivity: + edge coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + long_name: 'my_mesh' + +* | :class:`iris.experimental.ugrid.MeshCoord` + | Described in detail in `MeshCoords`_. + | Stores the following information: + + * | :attr:`~iris.experimental.ugrid.MeshCoord.mesh` + | The :class:`~iris.experimental.ugrid.Mesh` associated with this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.mesh` attribute of any :class:`~iris.cube.Cube` + this :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_) + + * | :attr:`~iris.experimental.ugrid.MeshCoord.location` + | ``node``/``edge``/``face`` - the element detailed by this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.location` attribute of any + :class:`~iris.cube.Cube` this + :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_). + +.. _ugrid MeshCoords: + +MeshCoords +~~~~~~~~~~ +Links a :class:`~iris.cube.Cube` to a :class:`~iris.experimental.ugrid.Mesh` by +attaching to the :class:`~iris.cube.Cube`\'s unstructured dimension, in the +same way that all :class:`~iris.coords.Coord`\s attach to +:class:`~iris.cube.Cube` dimensions. This allows a single +:class:`~iris.cube.Cube` to have a combination of unstructured and structured +dimensions (e.g. horizontal mesh plus vertical levels and a time series), +using the same logic for every dimension. + +:class:`~iris.experimental.ugrid.MeshCoord`\s are instantiated using a given +:class:`~iris.experimental.ugrid.Mesh`, ``location`` +("node"/"edge"/"face") and ``axis``. 
The process interprets the +:class:`~iris.experimental.ugrid.Mesh`\'s +:attr:`~iris.experimental.ugrid.Mesh.node_coords` and if appropriate the +:attr:`~iris.experimental.ugrid.Mesh.edge_node_connectivity`/ +:attr:`~iris.experimental.ugrid.Mesh.face_node_connectivity` and +:attr:`~iris.experimental.ugrid.Mesh.edge_coords`/ +:attr:`~iris.experimental.ugrid.Mesh.face_coords` +to produce a :class:`~iris.coords.Coord` +:attr:`~iris.coords.Coord.points` and :attr:`~iris.coords.Coord.bounds` +representation of all the :class:`~iris.experimental.ugrid.Mesh`\'s +nodes/edges/faces for the given axis. + +The method :meth:`iris.experimental.ugrid.Mesh.to_MeshCoords` is available to +create a :class:`~iris.experimental.ugrid.MeshCoord` for +every axis represented by that :class:`~iris.experimental.ugrid.Mesh`, +given only the ``location`` argument + +.. doctest:: ugrid_summaries + + >>> for coord in edge_cube.coords(mesh_coords=True): + ... print(coord) + MeshCoord : latitude / (degrees_north) + mesh: + location: 'edge' + points: [3. , 1.5, 1.5, 1.5, 0. , 0. ] + bounds: [ + [3., 3.], + [3., 0.], + [3., 0.], + [3., 0.], + [0., 0.], + [0., 0.]] + shape: (6,) bounds(6, 2) + dtype: float64 + standard_name: 'latitude' + axis: 'y' + MeshCoord : longitude / (degrees_east) + mesh: + location: 'edge' + points: [2.5, 0. , 5. , 6.5, 2.5, 6.5] + bounds: [ + [0., 5.], + [0., 0.], + [5., 5.], + [5., 8.], + [0., 5.], + [5., 8.]] + shape: (6,) bounds(6, 2) + dtype: float64 + standard_name: 'longitude' + axis: 'x' + + +__ CF-UGRID_ \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/data_structured_grid.svg b/docs/src/further_topics/ugrid/images/data_structured_grid.svg new file mode 100644 index 00000000000..2f3a1ce342a --- /dev/null +++ b/docs/src/further_topics/ugrid/images/data_structured_grid.svg @@ -0,0 +1 @@ +23, 28-19,-21101525-5-15-20-30xyCoordinate ArraysxyCoordinate Arrays23, 28-19, -21xyBounds Arraysderive point locationsassign data using dimensional indices,position in array == relative spatial positionderive area locations & shapesPoint DataArea DataData Array(bounded coordsalways have points too)my_variable* x+yare not lons+lats, just a demonstration! \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg b/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg new file mode 100644 index 00000000000..ab7302346b7 --- /dev/null +++ b/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg @@ -0,0 +1 @@ +5, 7, 8, 14`xy1212`node_coordinates`every node has its own x + y coordinatesderive node locations1515xy`node_coordinates`[5][7][8][14]construct faces by connecting nodesderive ‘corner’ node locationsassign data using 1D indexing,position in array unrelated to spatial positionmatch indices with facesmatch indices with nodesNode DataFace Data12Data Arraymy_variable12 ×4`face_node_connectivity`face_nodes \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/geovistalogo.svg b/docs/src/further_topics/ugrid/images/geovistalogo.svg new file mode 100644 index 00000000000..4c68f0ee3ff --- /dev/null +++ b/docs/src/further_topics/ugrid/images/geovistalogo.svg @@ -0,0 +1,573 @@ + + + + + + + + + + + + + + + + + + + + + + + + Cartographic rendering and mesh analytics powered by PyVista. 
+ GeoVista
+ [several hundred lines of SVG markup omitted - not recoverable from this view]
\ No newline at end of file
diff --git a/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg b/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg
new file mode 100644
index 00000000000..e70a9386a7f
--- /dev/null
+++ b/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg
@@ -0,0 +1,93 @@
+ image/svg+xml
+ Iris
+ [remaining SVG markup omitted - not recoverable from this view]
diff --git a/docs/src/further_topics/ugrid/images/plotting_basic.png b/docs/src/further_topics/ugrid/images/plotting_basic.png
new file mode 100644
index 00000000000..ba2b0b3329d
Binary files /dev/null and b/docs/src/further_topics/ugrid/images/plotting_basic.png differ
diff --git a/docs/src/further_topics/ugrid/images/plotting_global.png b/docs/src/further_topics/ugrid/images/plotting_global.png
new file mode 100644
index 00000000000..62fb56d9749
Binary files /dev/null and b/docs/src/further_topics/ugrid/images/plotting_global.png differ
diff --git a/docs/src/further_topics/ugrid/images/ugrid_edge_data.svg b/docs/src/further_topics/ugrid/images/ugrid_edge_data.svg
new file mode 100644
index 00000000000..374ef573880
--- /dev/null
+++ b/docs/src/further_topics/ugrid/images/ugrid_edge_data.svg
@@ -0,0 +1 @@
+`edge_node_connectivity`12 ×2
\ No newline at end of file
diff --git a/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg b/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg
new file mode 100644
index 00000000000..13b885d6005
--- /dev/null
+++ b/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg
@@ -0,0 +1 @@
+`face_node_connectivity`xy`node_coordinates`xy`face_coordinates`151512 ×41212`face_coordinates``node_coordinates`
\ No newline at end of file
diff --git a/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg b/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg
new file mode 100644
index 00000000000..ba72c42ffaf
--- /dev/null
+++ b/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg
@@ -0,0 +1 @@
+`
\ No newline at end of file
diff --git a/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg b/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg
new file mode 100644
index 00000000000..378978abc39
--- /dev/null
+++ b/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg
@@ -0,0 +1 @@
+`face_node_connectivity`12 ×6
\ No newline at end of file
diff --git a/docs/src/further_topics/ugrid/index.rst 
b/docs/src/further_topics/ugrid/index.rst new file mode 100644 index 00000000000..81ba24428a9 --- /dev/null +++ b/docs/src/further_topics/ugrid/index.rst @@ -0,0 +1,54 @@ +.. include:: ../../common_links.inc + +.. _ugrid: + +Mesh Support +************ + +Iris includes specialised handling of mesh-located data (as opposed to +grid-located data). Iris and its :ref:`partner packages ` are +designed to make working with mesh-located data as simple as possible, with new +capabilities being added all the time. More detail is in this section and in +the :mod:`iris.experimental.ugrid` API documentation. + +This mesh support is based on the `CF-UGRID Conventions`__; UGRID-conformant +meshes + data can be loaded from a file into Iris' data model, and meshes + +data represented in Iris' data model can be saved as a UGRID-conformant file. + +---- + +Meshes are different + Mesh-located data is fundamentally different to grid-located data. + Many of Iris' existing operations need adapting before they can work with + mesh-located data, and in some cases entirely new concepts are needed. + **Read the detail in these pages before jumping into your own code.** +Iris' mesh support is experimental + This is a rapidly evolving part of the codebase at time of writing + (``Jan 2022``), as we continually expand the operations that work with mesh + data. **Be prepared for breaking changes even in minor releases.** +:ref:`Get involved! ` + We know meshes are an exciting new area for much of Earth science, so we hope + there are a lot of you with new files/ideas/wishlists, and we'd love to hear + more 🙂. + +---- + +Read on to find out more... + +* :doc:`data_model` - learn why the mesh experience is so different. +* :doc:`partner_packages` - meet some optional dependencies that provide powerful mesh operations. +* :doc:`operations` - experience how your workflows will look when written for mesh data. + +.. + Need an actual TOC to get Sphinx working properly, but have hidden it in + favour of the custom bullets above. + +.. toctree:: + :hidden: + :maxdepth: 1 + + data_model + partner_packages + operations + +__ CF-UGRID_ diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst new file mode 100644 index 00000000000..f96e3e406c9 --- /dev/null +++ b/docs/src/further_topics/ugrid/operations.rst @@ -0,0 +1,995 @@ +.. _ugrid operations: + +Working with Mesh Data +********************** + +.. note:: Several of the operations below rely on the optional dependencies + mentioned in :doc:`partner_packages`. + +Operations Summary +------------------ +.. list-table:: + :align: left + :widths: 35, 75 + + * - `Making a Mesh`_ + - |tagline: making a mesh| + * - `Making a Cube`_ + - |tagline: making a cube| + * - `Save`_ + - |tagline: save| + * - `Load`_ + - |tagline: load| + * - `Plotting`_ + - |tagline: plotting| + * - `Region Extraction`_ + - |tagline: region extraction| + * - `Regridding`_ + - |tagline: regridding| + * - `Equality`_ + - |tagline: equality| + * - `Combining Cubes`_ + - |tagline: combining cubes| + * - `Arithmetic`_ + - |tagline: arithmetic| + +.. + Below: use demo code over prose wherever workable. Headings aren't an + exhaustive list (can you think of any other popular operations?). + +Making a Mesh +------------- +.. |tagline: making a mesh| replace:: |new| + +.. rubric:: |tagline: making a mesh| + +**Already have a file?** Consider skipping to `Load`_. 
+ +Creating Iris objects from scratch is a highly useful skill for testing code +and improving understanding of how Iris works. This knowledge will likely prove +particularly useful when converting data into the Iris mesh data model from +structured formats and non-UGRID mesh formats. + +The objects created in this example will be used where possible in the +subsequent example operations on this page. + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> import numpy as np + + >>> from iris.coords import AuxCoord + >>> from iris.experimental.ugrid import Connectivity, Mesh + + # Going to create the following mesh + # (node indices are shown to aid understanding): + # + # 0----1 + # | |\ + # | + |+\ + # 2----3--4 + + >>> node_x = AuxCoord( + ... points=[0.0, 5.0, 0.0, 5.0, 8.0], + ... standard_name="longitude", + ... units="degrees_east", + ... long_name="node_x_coordinates", + ... ) + >>> node_y = AuxCoord(points=[3.0, 3.0, 0.0, 0.0, 0.0], standard_name="latitude") + + >>> face_x = AuxCoord([2.0, 6.0], "longitude") + >>> face_y = AuxCoord([1.0, 1.0], "latitude") + + >>> edge_node_c = Connectivity( + ... indices=[[0, 1], [0, 2], [1, 3], [1, 4], [2, 3], [3, 4]], + ... cf_role="edge_node_connectivity", + ... attributes={"demo": "Supports every standard CF property"}, + ... ) + + # Create some dead-centre edge coordinates. + >>> edge_x, edge_y = [ + ... AuxCoord( + ... node_coord.points[edge_node_c.indices_by_location()].mean(axis=1), + ... node_coord.standard_name, + ... ) + ... for node_coord in (node_x, node_y) + ... ] + + >>> face_indices = np.ma.masked_equal([[0, 1, 3, 2], [1, 4, 3, 999]], 999) + >>> face_node_c = Connectivity( + ... indices=face_indices, cf_role="face_node_connectivity" + ... ) + + >>> my_mesh = Mesh( + ... long_name="my_mesh", + ... topology_dimension=2, # Supports 2D (face) elements. + ... node_coords_and_axes=[(node_x, "x"), (node_y, "y")], + ... connectivities=[edge_node_c, face_node_c], + ... edge_coords_and_axes=[(edge_x, "x"), (edge_y, "y")], + ... face_coords_and_axes=[(face_x, "x"), (face_y, "y")], + ... ) + + >>> print(my_mesh) + Mesh : 'my_mesh' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + edge + edge_dimension: 'Mesh2d_edge' + edge_node_connectivity: + edge coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + long_name: 'my_mesh' + + +.. _making a cube: + +Making a Cube (with a Mesh) +--------------------------- +.. |tagline: making a cube| replace:: |unchanged| + +.. rubric:: |tagline: making a cube| + +Creating a :class:`~iris.cube.Cube` is unchanged; the +:class:`~iris.experimental.ugrid.Mesh` is linked via a +:class:`~iris.experimental.ugrid.MeshCoord` (see :ref:`ugrid MeshCoords`): + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> import numpy as np + + >>> from iris.coords import DimCoord + >>> from iris.cube import Cube, CubeList + + >>> vertical_levels = DimCoord([0, 1, 2], "height") + + >>> my_cubelist = CubeList() + >>> for conn in (edge_node_c, face_node_c): + ... location = conn.location + ... mesh_coord_x, mesh_coord_y = my_mesh.to_MeshCoords(location) + ... data_shape = (len(conn.indices_by_location()), len(vertical_levels.points)) + ... data_array = np.arange(np.prod(data_shape)).reshape(data_shape) + ... + ... my_cubelist.append( + ... Cube( + ... data=data_array, + ... long_name=f"{location}_data", + ... units="K", + ... dim_coords_and_dims=[(vertical_levels, 1)], + ... 
aux_coords_and_dims=[(mesh_coord_x, 0), (mesh_coord_y, 0)], + ... ) + ... ) + + >>> print(my_cubelist) + 0: edge_data / (K) (-- : 6; height: 3) + 1: face_data / (K) (-- : 2; height: 3) + + >>> for cube in my_cubelist: + ... print(f"{cube.name()}: {cube.mesh.name()}, {cube.location}") + edge_data: my_mesh, edge + face_data: my_mesh, face + + >>> print(my_cubelist.extract_cube("edge_data")) + edge_data / (K) (-- : 6; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + + +Save +---- +.. |tagline: save| replace:: |unchanged| + +.. rubric:: |tagline: save| + +.. note:: UGRID saving support is limited to the NetCDF file format. + +The Iris saving process automatically detects if the :class:`~iris.cube.Cube` +has an associated :class:`~iris.experimental.ugrid.Mesh` and automatically +saves the file in a UGRID-conformant format: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from subprocess import run + + >>> from iris import save + + >>> cubelist_path = "my_cubelist.nc" + >>> save(my_cubelist, cubelist_path) + + >>> ncdump_result = run(["ncdump", "-h", cubelist_path], capture_output=True) + >>> print(ncdump_result.stdout.decode().replace("\t", " ")) + netcdf my_cubelist { + dimensions: + Mesh2d_node = 5 ; + Mesh2d_edge = 6 ; + Mesh2d_face = 2 ; + height = 3 ; + my_mesh_face_N_nodes = 4 ; + my_mesh_edge_N_nodes = 2 ; + variables: + int my_mesh ; + my_mesh:cf_role = "mesh_topology" ; + my_mesh:topology_dimension = 2 ; + my_mesh:long_name = "my_mesh" ; + my_mesh:node_coordinates = "longitude latitude" ; + my_mesh:edge_coordinates = "longitude_0 latitude_0" ; + my_mesh:face_coordinates = "longitude_1 latitude_1" ; + my_mesh:face_node_connectivity = "mesh2d_face" ; + my_mesh:edge_node_connectivity = "mesh2d_edge" ; + double longitude(Mesh2d_node) ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + longitude:long_name = "node_x_coordinates" ; + double latitude(Mesh2d_node) ; + latitude:standard_name = "latitude" ; + double longitude_0(Mesh2d_edge) ; + longitude_0:standard_name = "longitude" ; + double latitude_0(Mesh2d_edge) ; + latitude_0:standard_name = "latitude" ; + double longitude_1(Mesh2d_face) ; + longitude_1:standard_name = "longitude" ; + double latitude_1(Mesh2d_face) ; + latitude_1:standard_name = "latitude" ; + int64 mesh2d_face(Mesh2d_face, my_mesh_face_N_nodes) ; + mesh2d_face:_FillValue = -1LL ; + mesh2d_face:cf_role = "face_node_connectivity" ; + mesh2d_face:start_index = 0LL ; + int64 mesh2d_edge(Mesh2d_edge, my_mesh_edge_N_nodes) ; + mesh2d_edge:demo = "Supports every standard CF property" ; + mesh2d_edge:cf_role = "edge_node_connectivity" ; + mesh2d_edge:start_index = 0LL ; + int64 edge_data(Mesh2d_edge, height) ; + edge_data:long_name = "edge_data" ; + edge_data:units = "K" ; + edge_data:mesh = "my_mesh" ; + edge_data:location = "edge" ; + int64 height(height) ; + height:standard_name = "height" ; + int64 face_data(Mesh2d_face, height) ; + face_data:long_name = "face_data" ; + face_data:units = "K" ; + face_data:mesh = "my_mesh" ; + face_data:location = "face" ; + + // global attributes: + :Conventions = "CF-1.7" ; + } + + +The :func:`iris.experimental.ugrid.save_mesh` function allows +:class:`~iris.experimental.ugrid.Mesh`\es to be saved to file without +associated :class:`~iris.cube.Cube`\s: + +.. dropdown:: :opticon:`code` + + .. 
doctest:: ugrid_operations + + >>> from subprocess import run + + >>> from iris.experimental.ugrid import save_mesh + + >>> mesh_path = "my_mesh.nc" + >>> save_mesh(my_mesh, mesh_path) + + >>> ncdump_result = run(["ncdump", "-h", mesh_path], capture_output=True) + >>> print(ncdump_result.stdout.decode().replace("\t", " ")) + netcdf my_mesh { + dimensions: + Mesh2d_node = 5 ; + Mesh2d_edge = 6 ; + Mesh2d_face = 2 ; + my_mesh_face_N_nodes = 4 ; + my_mesh_edge_N_nodes = 2 ; + variables: + int my_mesh ; + my_mesh:cf_role = "mesh_topology" ; + my_mesh:topology_dimension = 2 ; + my_mesh:long_name = "my_mesh" ; + my_mesh:node_coordinates = "longitude latitude" ; + my_mesh:edge_coordinates = "longitude_0 latitude_0" ; + my_mesh:face_coordinates = "longitude_1 latitude_1" ; + my_mesh:face_node_connectivity = "mesh2d_face" ; + my_mesh:edge_node_connectivity = "mesh2d_edge" ; + double longitude(Mesh2d_node) ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + longitude:long_name = "node_x_coordinates" ; + double latitude(Mesh2d_node) ; + latitude:standard_name = "latitude" ; + double longitude_0(Mesh2d_edge) ; + longitude_0:standard_name = "longitude" ; + double latitude_0(Mesh2d_edge) ; + latitude_0:standard_name = "latitude" ; + double longitude_1(Mesh2d_face) ; + longitude_1:standard_name = "longitude" ; + double latitude_1(Mesh2d_face) ; + latitude_1:standard_name = "latitude" ; + int64 mesh2d_face(Mesh2d_face, my_mesh_face_N_nodes) ; + mesh2d_face:_FillValue = -1LL ; + mesh2d_face:cf_role = "face_node_connectivity" ; + mesh2d_face:start_index = 0LL ; + int64 mesh2d_edge(Mesh2d_edge, my_mesh_edge_N_nodes) ; + mesh2d_edge:demo = "Supports every standard CF property" ; + mesh2d_edge:cf_role = "edge_node_connectivity" ; + mesh2d_edge:start_index = 0LL ; + + // global attributes: + :Conventions = "CF-1.7" ; + } + + +Load +---- +.. |tagline: load| replace:: |different| - UGRID parsing is opt-in + +.. rubric:: |tagline: load| + +.. note:: UGRID loading support is limited to the NetCDF file format. + +While Iris' UGRID support remains :mod:`~iris.experimental`, parsing UGRID when +loading a file remains **optional**. To load UGRID data from a file into the +Iris mesh data model, use the +:const:`iris.experimental.ugrid.PARSE_UGRID_ON_LOAD` context manager: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from iris import load + >>> from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... loaded_cubelist = load(cubelist_path) + + # Sort CubeList to ensure consistent result. + >>> loaded_cubelist.sort(key=lambda cube: cube.name()) + >>> print(loaded_cubelist) + 0: edge_data / (K) (-- : 6; height: 3) + 1: face_data / (K) (-- : 2; height: 3) + +All the existing loading functionality still operates on UGRID-compliant +data - :class:`~iris.Constraint`\s, callbacks, :func:`~iris.load_cube` +etcetera: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from iris import Constraint, load_cube + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... ground_cubelist = load(cubelist_path, Constraint(height=0)) + ... face_cube = load_cube(cubelist_path, "face_data") + + # Sort CubeList to ensure consistent result. 
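+ # (A plain ``load`` makes no guarantee about cube order, so sorting
+ # by name keeps the printed output stable.)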
+ >>> ground_cubelist.sort(key=lambda cube: cube.name())
+ >>> print(ground_cubelist)
+ 0: edge_data / (K) (-- : 6)
+ 1: face_data / (K) (-- : 2)
+
+ >>> print(face_cube)
+ face_data / (K) (-- : 2; height: 3)
+     Dimension coordinates:
+         height - x
+     Mesh coordinates:
+         latitude x -
+         longitude x -
+     Attributes:
+         Conventions 'CF-1.7'
+
+.. note::
+
+    We recommend caution if constraining on coordinates associated with a
+    :class:`~iris.experimental.ugrid.Mesh`. An individual coordinate value
+    might not be shared by any other data points, and using a coordinate range
+    will be notably more computationally demanding given the size of the
+    dimension versus structured grids
+    (:ref:`see the data model detail <ugrid implications>`).
+
+The :func:`iris.experimental.ugrid.load_mesh` and
+:func:`~iris.experimental.ugrid.load_meshes` functions allow only
+:class:`~iris.experimental.ugrid.Mesh`\es to be loaded from a file without
+creating any associated :class:`~iris.cube.Cube`\s:
+
+.. dropdown:: :opticon:`code`
+
+ .. doctest:: ugrid_operations
+
+ >>> from iris.experimental.ugrid import load_mesh
+
+ >>> with PARSE_UGRID_ON_LOAD.context():
+ ...     loaded_mesh = load_mesh(cubelist_path)
+
+ >>> print(loaded_mesh)
+ Mesh : 'my_mesh'
+     topology_dimension: 2
+     node
+         node_dimension: 'Mesh2d_node'
+         node coordinates
+             shape(5,)>
+             shape(5,)>
+     edge
+         edge_dimension: 'Mesh2d_edge'
+         edge_node_connectivity: shape(6, 2)>
+         edge coordinates
+             shape(6,)>
+             shape(6,)>
+     face
+         face_dimension: 'Mesh2d_face'
+         face_node_connectivity: shape(2, 4)>
+         face coordinates
+             shape(2,)>
+             shape(2,)>
+     long_name: 'my_mesh'
+     var_name: 'my_mesh'
+
+Plotting
+--------
+.. |tagline: plotting| replace:: |different| - plot with GeoVista
+
+.. rubric:: |tagline: plotting|
+
+The Cartopy-Matplotlib combination is not optimised for displaying the high
+number of irregular shapes associated with meshes. Thankfully mesh
+visualisation is already popular in many other fields (e.g. CGI, gaming,
+SEM microscopy), so there is a wealth of tooling available, which
+:ref:`ugrid geovista` harnesses for cartographic plotting.
+
+GeoVista's default behaviour is to convert lat-lon information into full XYZ
+coordinates so the data is visualised on the surface of a 3D globe. The plots
+are interactive by default, so it's easy to explore the data in detail.
+
+2D projections have also been demonstrated in proofs of concept, and will
+be added to the API in the near future.
+
+This first example uses GeoVista to plot the ``face_cube`` that we created
+earlier:
+
+.. dropdown:: :opticon:`code`
+
+ .. code-block:: python
+
+ >>> from geovista import GeoPlotter, Transform
+ >>> from geovista.common import to_xyz
+
+
+ # We'll re-use this to plot some real global data later.
+ >>> def cube_faces_to_polydata(cube):
+ ...     lons, lats = cube.mesh.node_coords
+ ...     face_node = cube.mesh.face_node_connectivity
+ ...     indices = face_node.indices_by_location()
+ ...
+ ...     mesh = Transform.from_unstructured(
+ ...         lons.points,
+ ...         lats.points,
+ ...         indices,
+ ...         data=cube.data,
+ ...         name=f"{cube.name()} / {cube.units}",
+ ...         start_index=face_node.start_index,
+ ...     )
+ ...     return mesh
+
+ >>> print(face_cube)
+ face_data / (K) (-- : 2; height: 3)
+     Dimension coordinates:
+         height - x
+     Mesh coordinates:
+         latitude x -
+         longitude x -
+     Attributes:
+         Conventions 'CF-1.7'
+
+ # Convert our mesh+data to a PolyData object.
+ # Just plotting a single height level. 
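+ # (``face_cube[:, 0]`` keeps every face but selects only the first height
+ # level, giving the 1D per-face data that GeoVista expects.)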
+ >>> face_polydata = cube_faces_to_polydata(face_cube[:, 0]) + >>> print(face_polydata) + PolyData (0x7ff4861ff4c0) + N Cells: 2 + N Points: 5 + X Bounds: 9.903e-01, 1.000e+00 + Y Bounds: 0.000e+00, 1.392e-01 + Z Bounds: 6.123e-17, 5.234e-02 + N Arrays: 2 + + # Create the GeoVista plotter and add our mesh+data to it. + >>> my_plotter = GeoPlotter() + >>> my_plotter.add_coastlines(color="black") + >>> my_plotter.add_base_layer(color="grey") + >>> my_plotter.add_mesh(face_polydata) + + # Centre the camera on the data. + >>> camera_region = to_xyz( + ... face_cube.coord("longitude").points, + ... face_cube.coord("latitude").points, + ... radius=3, + ... ) + >>> camera_pos = camera_region.mean(axis=0) + >>> my_plotter.camera.position = camera_pos + + >>> my_plotter.show() + + .. image:: images/plotting_basic.png + :alt: A GeoVista plot of the basic example Mesh. + + This artificial data makes West Africa rather chilly! + +Here's another example using a global cubed-sphere data set: + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> from iris import load_cube + >>> from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + + # Demonstrating with a global data set. + # You could also download this file from github.com/SciTools/iris-test-data. + >>> from iris.tests import get_data_path + >>> file_path = get_data_path( + ... [ + ... "NetCDF", + ... "unstructured_grid", + ... "lfric_surface_mean.nc", + ... ] + ... ) + >>> with PARSE_UGRID_ON_LOAD.context(): + ... global_cube = load_cube(file_path, "tstar_sea") + >>> print(global_cube) + sea_surface_temperature / (K) (-- : 1; -- : 13824) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + mean time (300 s) + mean time_counter + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 1 d + name lfric_surface + online_operation average + timeStamp 2020-Feb-07 16:23:14 GMT + title Created by xios + uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b + + >>> global_polydata = cube_faces_to_polydata(global_cube) + >>> print(global_polydata) + PolyData (0x7f761b536160) + N Cells: 13824 + N Points: 13826 + X Bounds: -1.000e+00, 1.000e+00 + Y Bounds: -1.000e+00, 1.000e+00 + Z Bounds: -1.000e+00, 1.000e+00 + N Arrays: 2 + + >>> my_plotter = GeoPlotter() + >>> my_plotter.add_coastlines() + >>> my_plotter.add_mesh(global_polydata, show_edges=True) + + >>> my_plotter.show() + + .. image:: images/plotting_global.png + :alt: A GeoVista plot of a global sea surface temperature Mesh. + +Region Extraction +----------------- +.. |tagline: region extraction| replace:: |different| - use GeoVista for mesh analysis + +.. rubric:: |tagline: region extraction| + +As described in :doc:`data_model`, indexing for a range along a +:class:`~iris.cube.Cube`\'s :meth:`~iris.cube.Cube.mesh_dim` will not provide +a contiguous region, since **position on the unstructured dimension is +unrelated to spatial position**. This means that subsetted +:class:`~iris.experimental.ugrid.MeshCoord`\s cannot be reliably interpreted +as intended, and subsetting a :class:`~iris.experimental.ugrid.MeshCoord` is +therefore set to return an :class:`~iris.coords.AuxCoord` instead - breaking +the link between :class:`~iris.cube.Cube` and +:class:`~iris.experimental.ugrid.Mesh`: + +.. dropdown:: :opticon:`code` + + .. 
doctest:: ugrid_operations + + >>> edge_cube = my_cubelist.extract_cube("edge_data") + >>> print(edge_cube) + edge_data / (K) (-- : 6; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + + # Sub-setted MeshCoords have become AuxCoords. + >>> print(edge_cube[:-1]) + edge_data / (K) (-- : 5; height: 3) + Dimension coordinates: + height - x + Auxiliary coordinates: + latitude x - + longitude x - + +Extracting a region therefore requires extra steps - to determine the spatial +position of the data points before they can be analysed as inside/outside the +selected region. The recommended way to do this is using tools provided by +:ref:`ugrid geovista`, which is optimised for performant mesh analysis. + +This approach centres around using :meth:`geovista.geodesic.BBox.enclosed` to +get the subset of the original mesh that is inside the +:class:`~geovista.geodesic.BBox`. This subset :class:`pyvista.PolyData` object +includes the original indices of each datapoint - the ``vtkOriginalCellIds`` +array, which can be used to index the original :class:`~iris.cube.Cube`. Since +we **know** that this subset :class:`~iris.cube.Cube` represents a regional +mesh, we then reconstruct a :class:`~iris.experimental.ugrid.Mesh` from the +:class:`~iris.cube.Cube`\'s :attr:`~iris.cube.Cube.aux_coords` using +:meth:`iris.experimental.ugrid.Mesh.from_coords`: + +.. + Not using doctest here as want to keep GeoVista as optional dependency. + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> from geovista import Transform + >>> from geovista.geodesic import BBox + >>> from iris import load_cube + >>> from iris.experimental.ugrid import Mesh, PARSE_UGRID_ON_LOAD + + # Need a larger dataset to demonstrate this operation. + # You could also download this file from github.com/SciTools/iris-test-data. + >>> from iris.tests import get_data_path + >>> file_path = get_data_path( + ... [ + ... "NetCDF", + ... "unstructured_grid", + ... "lfric_ngvat_2D_72t_face_half_levels_main_conv_rain.nc", + ... ] + ... ) + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... global_cube = load_cube(file_path, "conv_rain") + >>> print(global_cube) + surface_convective_rainfall_rate / (kg m-2 s-1) (-- : 72; -- : 864) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + point time + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 300 s + name lfric_ngvat_2D_72t_face_half_levels_main_conv_rain + online_operation instant + timeStamp 2020-Oct-18 21:18:35 GMT + title Created by xios + uuid b3dc0fb4-9828-4663-a5ac-2a5763280159 + + # Convert the Mesh to a GeoVista PolyData object. + >>> lons, lats = global_cube.mesh.node_coords + >>> face_node = global_cube.mesh.face_node_connectivity + >>> indices = face_node.indices_by_location() + >>> global_polydata = Transform.from_unstructured( + ... lons.points, lats.points, indices, start_index=face_node.start_index + ... ) + + # Define a region of 4 corners connected by great circles. + # Specialised sub-classes of BBox are also available e.g. panel/wedge. + >>> region = BBox(lons=[0, 70, 70, 0], lats=[-25, -25, 45, 45]) + # 'Apply' the region to the PolyData object. + >>> region_polydata = region.enclosed(global_polydata, preference="center") + # Get the remaining face indices, to use for indexing the Cube. + >>> indices = region_polydata["vtkOriginalCellIds"] + + >>> print(type(indices)) + + # 101 is smaller than the original 864. 
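+ # (Only the faces whose centres fall inside the BBox are kept - see
+ # ``preference="center"`` above.)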
+ >>> print(len(indices)) + 101 + >>> print(indices[:10]) + [ 6 7 8 9 10 11 18 19 20 21] + + # Use the face indices to subset the global cube. + >>> region_cube = global_cube[:, indices] + + # In this case we **know** the indices correspond to a contiguous + # region, so we will convert the sub-setted Cube back into a + # Cube-with-Mesh. + >>> new_mesh = Mesh.from_coords(*region_cube.coords(dimensions=1)) + >>> new_mesh_coords = new_mesh.to_MeshCoords(global_cube.location) + >>> for coord in new_mesh_coords: + ... region_cube.remove_coord(coord.name()) + ... region_cube.add_aux_coord(coord, 1) + + # A Mesh-Cube with a subset (101) of the original 864 faces. + >>> print(region_cube) + surface_convective_rainfall_rate / (kg m-2 s-1) (-- : 72; -- : 101) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + point time + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 300 s + name lfric_ngvat_2D_72t_face_half_levels_main_conv_rain + online_operation instant + timeStamp 2020-Oct-18 21:18:35 GMT + title Created by xios + uuid b3dc0fb4-9828-4663-a5ac-2a5763280159 + +Regridding +---------- +.. |tagline: regridding| replace:: |different| - use iris-esmf-regrid for mesh regridders + +.. rubric:: |tagline: regridding| + +Regridding to or from a mesh requires different logic than Iris' existing +regridders, which are designed for structured grids. For this we recommend +ESMF's powerful regridding tools, which integrate with Iris' mesh data model +via the :ref:`ugrid iris-esmf-regrid` package. + +.. todo: inter-sphinx links when available. + +Regridding is achieved via the +:class:`esmf_regrid.experimental.unstructured_scheme.MeshToGridESMFRegridder` +and +:class:`~esmf_regrid.experimental.unstructured_scheme.GridToMeshESMFRegridder` +classes. Regridding from a source :class:`~iris.cube.Cube` to a target +:class:`~iris.cube.Cube` involves initialising and then calling one of these +classes. Initialising is done by passing in the source and target +:class:`~iris.cube.Cube` as arguments. The regridder is then called by passing +the source :class:`~iris.cube.Cube` as an argument. We can demonstrate this +with the +:class:`~esmf_regrid.experimental.unstructured_scheme.MeshToGridESMFRegridder`: + +.. + Not using doctest here as want to keep iris-esmf-regrid as optional dependency. + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> from esmf_regrid.experimental.unstructured_scheme import MeshToGridESMFRegridder + >>> from iris import load, load_cube + >>> from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + + # You could also download these files from github.com/SciTools/iris-test-data. + >>> from iris.tests import get_data_path + >>> mesh_file = get_data_path( + ... ["NetCDF", "unstructured_grid", "lfric_surface_mean.nc"] + ... ) + >>> grid_file = get_data_path( + ... ["NetCDF", "regrid", "regrid_template_global_latlon.nc"] + ... ) + + # Load a list of cubes defined on the same Mesh. + >>> with PARSE_UGRID_ON_LOAD.context(): + ... mesh_cubes = load(mesh_file) + + # Extract a specific cube. 
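+ # (``extract_cube`` returns a single Cube, and fails unless the name
+ # matches exactly one of the loaded cubes.)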
+ >>> mesh_cube1 = mesh_cubes.extract_cube("sea_surface_temperature")
+ >>> print(mesh_cube1)
+ sea_surface_temperature / (K) (-- : 1; -- : 13824)
+ Mesh coordinates:
+ latitude - x
+ longitude - x
+ Auxiliary coordinates:
+ time x -
+ Cell methods:
+ mean time (300 s)
+ mean time_counter
+ Attributes:
+ Conventions UGRID
+ description Created by xios
+ interval_operation 300 s
+ interval_write 1 d
+ name lfric_surface
+ online_operation average
+ timeStamp 2020-Feb-07 16:23:14 GMT
+ title Created by xios
+ uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b
+
+ # Load the target grid.
+ >>> sample_grid = load_cube(grid_file)
+ >>> print(sample_grid)
+ sample_grid / (unknown) (latitude: 180; longitude: 360)
+ Dimension coordinates:
+ latitude x -
+ longitude - x
+ Attributes:
+ Conventions 'CF-1.7'
+
+ # Initialise the regridder.
+ >>> rg = MeshToGridESMFRegridder(mesh_cube1, sample_grid)
+
+ # Regrid the mesh cube.
+ >>> result1 = rg(mesh_cube1)
+ >>> print(result1)
+ sea_surface_temperature / (K) (-- : 1; latitude: 180; longitude: 360)
+ Dimension coordinates:
+ latitude - x -
+ longitude - - x
+ Auxiliary coordinates:
+ time x - -
+ Cell methods:
+ mean time (300 s)
+ mean time_counter
+ Attributes:
+ Conventions UGRID
+ description Created by xios
+ interval_operation 300 s
+ interval_write 1 d
+ name lfric_surface
+ online_operation average
+ timeStamp 2020-Feb-07 16:23:14 GMT
+ title Created by xios
+ uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b
+
+.. note::
+
+ **All** :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` are
+ retained when regridding, so watch out for any attributes that reference
+ the format (there are several in these examples) - you may want to manually
+ remove them to avoid later confusion.
+
+The initialisation process is computationally expensive, so we use caching to
+improve performance. Once a regridder has been initialised, it can be used on
+any :class:`~iris.cube.Cube` which has been defined on the same
+:class:`~iris.experimental.ugrid.Mesh` (or on the same **grid** in the case of
+:class:`~esmf_regrid.experimental.unstructured_scheme.GridToMeshESMFRegridder`).
+Since calling a regridder is usually much faster than initialising one, reusing
+regridders can save a lot of time. We can demonstrate the reuse of the
+previously initialised regridder:
+
+.. dropdown:: :opticon:`code`
+
+ .. code-block:: python
+
+ # Extract a different cube defined on the same Mesh.
+ >>> mesh_cube2 = mesh_cubes.extract_cube("precipitation_flux")
+ >>> print(mesh_cube2)
+ precipitation_flux / (kg m-2 s-1) (-- : 1; -- : 13824)
+ Mesh coordinates:
+ latitude - x
+ longitude - x
+ Auxiliary coordinates:
+ time x -
+ Cell methods:
+ mean time (300 s)
+ mean time_counter
+ Attributes:
+ Conventions UGRID
+ description Created by xios
+ interval_operation 300 s
+ interval_write 1 d
+ name lfric_surface
+ online_operation average
+ timeStamp 2020-Feb-07 16:23:14 GMT
+ title Created by xios
+ uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b
+
+ # Regrid the new mesh cube using the same regridder.
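+ # (No new initialisation happens here - the expensive setup done when
+ # rg was created is simply reused for this call.)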
+ >>> result2 = rg(mesh_cube2) + >>> print(result2) + precipitation_flux / (kg m-2 s-1) (-- : 1; latitude: 180; longitude: 360) + Dimension coordinates: + latitude - x - + longitude - - x + Auxiliary coordinates: + time x - - + Cell methods: + mean time (300 s) + mean time_counter + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 1 d + name lfric_surface + online_operation average + timeStamp 2020-Feb-07 16:23:14 GMT + title Created by xios + uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b + +Support also exists for saving and loading previously initialised regridders - +:func:`esmf_regrid.experimental.io.save_regridder` and +:func:`~esmf_regrid.experimental.io.load_regridder` - so that they can be +re-used by future scripts. + +Equality +-------- +.. |tagline: equality| replace:: |unchanged| + +.. rubric:: |tagline: equality| + +:class:`~iris.experimental.ugrid.Mesh` comparison is supported, and comparing +two ':class:`~iris.experimental.ugrid.Mesh`-:class:`~iris.cube.Cube`\s' will +include a comparison of the respective +:class:`~iris.experimental.ugrid.Mesh`\es, with no extra action needed by the +user. + +.. note:: + + Keep an eye on memory demand when comparing large + :class:`~iris.experimental.ugrid.Mesh`\es, but note that + :class:`~iris.experimental.ugrid.Mesh`\ equality is enabled for lazy + processing (:doc:`/userguide/real_and_lazy_data`), so if the + :class:`~iris.experimental.ugrid.Mesh`\es being compared are lazy the + process will use less memory than their total size. + +Combining Cubes +--------------- +.. |tagline: combining cubes| replace:: |pending| + +.. rubric:: |tagline: combining cubes| + +Merging or concatenating :class:`~iris.cube.Cube`\s (described in +:doc:`/userguide/merge_and_concat`) with two different +:class:`~iris.experimental.ugrid.Mesh`\es is not possible - a +:class:`~iris.cube.Cube` must be associated with just a single +:class:`~iris.experimental.ugrid.Mesh`, and merge/concatenate are not yet +capable of combining multiple :class:`~iris.experimental.ugrid.Mesh`\es into +one. + +:class:`~iris.cube.Cube`\s that include +:class:`~iris.experimental.ugrid.MeshCoord`\s can still be merged/concatenated +on dimensions other than the :meth:`~iris.cube.Cube.mesh_dim`, since such +:class:`~iris.cube.Cube`\s will by definition share the same +:class:`~iris.experimental.ugrid.Mesh`. + +.. seealso:: + + You may wish to investigate + :func:`iris.experimental.ugrid.recombine_submeshes`, which can be used + for a very specific type of :class:`~iris.experimental.ugrid.Mesh` + combination not detailed here. + +Arithmetic +---------- +.. |tagline: arithmetic| replace:: |pending| + +.. rubric:: |tagline: arithmetic| + +:class:`~iris.cube.Cube` Arithmetic (described in :doc:`/userguide/cube_maths`) +has not yet been adapted to handle :class:`~iris.cube.Cube`\s that include +:class:`~iris.experimental.ugrid.MeshCoord`\s. + + +.. todo: + Enumerate other popular operations that aren't yet possible + (and are they planned soon?) + +.. |new| replace:: ✨ New +.. |unchanged| replace:: ♻️ Unchanged +.. |different| replace:: ⚠️ Different +.. |pending| replace:: 🚧 Support Pending \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/partner_packages.rst b/docs/src/further_topics/ugrid/partner_packages.rst new file mode 100644 index 00000000000..8e36f4ffc2d --- /dev/null +++ b/docs/src/further_topics/ugrid/partner_packages.rst @@ -0,0 +1,100 @@ +.. 
_ugrid partners:
+
+Iris' Mesh Partner Packages
+****************************
+Python is an easy-to-use language with a very strong collaborative
+scientific community, which is why Iris is written in Python. *Performant*
+Python relies on calls down to low-level languages like C, which suits
+structured grid work well, since structured grids can be represented
+directly as NumPy arrays. This is more difficult when working with
+unstructured meshes, where extra steps are needed to determine data
+position (:ref:`see the data model detail `), and where we must find
+other ways of passing the operations down to more optimised languages.
+
+The Iris team are therefore developing 'wrapper' packages, which make it quick
+and easy to analyse Iris mesh data via some popular Python packages that use
+powerful tools under the hood, working in C and other languages.
+
+These solutions have been placed in their own 'partner packages' for several
+reasons:
+
+* They can be useful to others who are not using Iris.
+
+ * Everyone working with multi-dimensional geographic datasets shares common
+ problems that need solving.
+ * Wider user base = stronger community = better solutions.
+
+* Only some Iris users will need them - they are **optional** Iris dependencies.
+
+ * They introduce a lot of new API.
+ * They introduce new large dependencies that take time to install and need
+ disk space.
+
+Below you can learn more about the partner packages and how they are useful.
+Specifics of which operations require their installation can be found in
+:doc:`operations`.
+
+.. important:: **Experimental**
+
+ As with Iris' mesh support, these packages are still in the
+ experimental stages. They would love your feedback, but as immature
+ packages their API, documentation, test coverage and CI are still
+ 'under construction'.
+
+
+.. _`ugrid geovista`:
+
+`GeoVista`_
+===========
+.. image:: images/geovistalogo.svg
+ :width: 300
+ :class: no-scaled-link
+
+.. rubric:: "Cartographic rendering and mesh analytics powered by `PyVista`_"
+
+PyVista is described as "VTK for humans" - VTK is a very powerful toolkit for
+working with meshes, and PyVista brings that power into the Python ecosystem.
+GeoVista in turn makes it easy to use PyVista specifically for cartographic
+work, designed from the start with the Iris
+:class:`~iris.experimental.ugrid.Mesh` in mind.
+
+Applications
+------------
+* Interactively plot mesh data:
+
+ * On a 3D globe.
+ * On your favourite projection.
+
+* Extract a specific region from a mesh.
+* Combine multiple meshes into one.
+
+.. _`ugrid iris-esmf-regrid`:
+
+`iris-esmf-regrid`_
+===================
+.. image:: images/iris-esmf-regrid.svg
+ :width: 300
+ :class: no-scaled-link
+
+.. rubric:: "A collection of structured and unstructured ESMF regridding schemes for Iris"
+
+ESMF provide a sophisticated, performant regridding utility that supports a
+variety of regridding types with both structured grids and unstructured meshes,
+and it also has a flexible Python interface - ESMPy. iris-esmf-regrid takes
+advantage of having a specific use-case - regridding Iris
+:class:`~iris.cube.Cube`\s - to provide ESMPy-Iris wrappers that make the
+process as easy as possible, with highly optimised performance.
+
+Applications
+------------
+* Regrid structured to unstructured.
+* Regrid unstructured to structured.
+* Regrid with dask integration, computing in parallel and maintaining data
+ laziness.
+* | Save a prepared regridder for re-use in subsequent runs.
+ | Regridders can even be re-used on sources with different masks - a + significant efficiency gain. + +.. _GeoVista: https://github.com/bjlittle/geovista +.. _PyVista: https://docs.pyvista.org/index.html +.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid diff --git a/docs/src/index.rst b/docs/src/index.rst index 7518f948d88..d6270d60cc9 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -98,6 +98,15 @@ For **Iris 2.4** and earlier documentation please see the generated/gallery/index +.. toctree:: + :maxdepth: 1 + :caption: What's New in Iris + :hidden: + + whatsnew/latest + Archive + + .. toctree:: :maxdepth: 1 :caption: User Guide @@ -131,6 +140,7 @@ For **Iris 2.4** and earlier documentation please see the further_topics/metadata further_topics/lenient_metadata further_topics/lenient_maths + further_topics/ugrid/index .. toctree:: @@ -153,7 +163,6 @@ For **Iris 2.4** and earlier documentation please see the :hidden: generated/api/iris - whatsnew/index techpapers/index copyright votable_issues diff --git a/docs/src/installing.rst b/docs/src/installing.rst index e358bb42c98..37a8942ab38 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -85,10 +85,12 @@ local copy of Iris:: conda env create --force --file=requirements/ci/iris.yml conda activate iris-dev -The ``--force`` option is used when creating the environment, this is optional -and will force the any existing ``iris-dev`` conda environment to be deleted -first if present. This is useful when rebuilding your environment due to a -change in requirements. +.. note:: + + The ``--force`` option, used when creating the environment, first removes + any previously existing ``iris-dev`` environment of the same name. This is + particularly useful when rebuilding your environment due to a change in + requirements. The ``requirements/ci/iris.yml`` file defines the Iris development conda environment *name* and all the relevant *top level* `conda-forge` package diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst index 78490cd749d..e8a1744a44f 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/userguide/cube_maths.rst @@ -63,9 +63,9 @@ but with the data representing their difference: forecast_reference_time 1859-09-01 06:00:00 height 1.5 m Attributes: - Conventions CF-1.5 - Model scenario E1 - source Data from Met Office Unified Model 6.05 + Conventions 'CF-1.5' + Model scenario 'E1' + source 'Data from Met Office Unified Model 6.05' .. 
note:: diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst index ac66ff4e53b..980f1e132f4 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -53,8 +53,8 @@ For instance, suppose we have a cube: forecast_reference_time 2009-11-19 04:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' In this case we have a 4 dimensional cube; @@ -84,8 +84,8 @@ we can pass the coordinate name and the aggregation definition to the mean model_level_number Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Similarly other analysis operators such as ``MAX``, ``MIN`` and ``STD_DEV`` @@ -143,8 +143,8 @@ These areas can now be passed to the ``collapsed`` method as weights: mean grid_longitude, grid_latitude Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Several examples of area averaging exist in the gallery which may be of interest, including an example on taking a :ref:`global area-weighted mean @@ -229,7 +229,7 @@ Printing this cube now shows that two extra coordinates exist on the cube: Cell methods: mean month, year Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i024 diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index 5573c4aa8ee..f590485606b 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -79,7 +79,7 @@ Let's take the air temperature cube we've seen previously: mean over years time Attributes: STASH m01s16i203 - source Data from Met Office Unified Model + source 'Data from Met Office Unified Model' We can interpolate specific values from the coordinates of the cube: @@ -98,7 +98,7 @@ We can interpolate specific values from the coordinates of the cube: mean over years time Attributes: STASH m01s16i203 - source Data from Met Office Unified Model + source 'Data from Met Office Unified Model' As we can see, the resulting cube is scalar and has longitude and latitude coordinates with the values defined in our sample points. diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index 64a9bfd8229..d13dee369c1 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -172,8 +172,8 @@ output as this is the quickest way of inspecting the contents of a cube. Here is forecast_reference_time 2009-11-19 04:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Using this output we can deduce that: diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst index ec459dbbdf3..fb938975e8b 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -100,8 +100,8 @@ list indexing can be used: forecast_reference_time 2009-11-19 04:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Notice that the result of printing a **cube** is a little more verbose than it was when printing a **list of cubes**. 
In addition to the very short summary @@ -304,13 +304,21 @@ for ease of calendar-based testing. >>> cube_all = iris.load_cube(filename, 'air_potential_temperature') >>> print('All times :\n' + str(cube_all.coord('time'))) All times : - DimCoord([2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] + shape: (3,) + dtype: float64 + standard_name: 'time' >>> # Define a function which accepts a datetime as its argument (this is simplified in later examples). >>> hour_11 = iris.Constraint(time=lambda cell: cell.point.hour == 11) >>> cube_11 = cube_all.extract(hour_11) >>> print('Selected times :\n' + str(cube_11.coord('time'))) Selected times : - DimCoord([2009-11-19 11:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 11:00:00] + shape: (1,) + dtype: float64 + standard_name: 'time' Secondly, the :class:`iris.time` module provides flexible time comparison facilities. An :class:`iris.time.PartialDateTime` object can be compared to @@ -335,7 +343,11 @@ The previous constraint example can now be written as: >>> print(iris.load_cube( ... iris.sample_data_path('uk_hires.pp'), ... 'air_potential_temperature' & the_11th_hour).coord('time')) - DimCoord([2009-11-19 11:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 11:00:00] + shape: (1,) + dtype: float64 + standard_name: 'time' It is common that a cube will need to be constrained between two given dates. In the following example we construct a time sequence representing the first @@ -355,10 +367,13 @@ day of every week for many years: :options: +NORMALIZE_WHITESPACE, +ELLIPSIS >>> print(long_ts.coord('time')) - DimCoord([2007-04-09 00:00:00, 2007-04-16 00:00:00, 2007-04-23 00:00:00, - ... - 2010-02-01 00:00:00, 2010-02-08 00:00:00, 2010-02-15 00:00:00], - standard_name='time', calendar='gregorian') + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-04-09 00:00:00, 2007-04-16 00:00:00, ..., + 2010-02-08 00:00:00, 2010-02-15 00:00:00] + shape: (150,) + dtype: int64 + standard_name: 'time' Given two dates in datetime format, we can select all points between them. @@ -371,9 +386,13 @@ Given two dates in datetime format, we can select all points between them. ... time=lambda cell: d1 <= cell.point < d2) >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) >>> print(within_st_swithuns_07.coord('time')) - DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00], - standard_name='time', calendar='gregorian') + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] + shape: (6,) + dtype: int64 + standard_name: 'time' Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime` objects. @@ -387,9 +406,13 @@ objects. ... 
time=lambda cell: pdt1 <= cell.point < pdt2) >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) >>> print(within_st_swithuns_07.coord('time')) - DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00], - standard_name='time', calendar='gregorian') + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] + shape: (6,) + dtype: int64 + standard_name: 'time' A more complex example might require selecting points over an annually repeating date range. We can select points within a certain part of the year, in this case @@ -402,13 +425,19 @@ PartialDateTime this becomes simple: ... time=lambda cell: PartialDateTime(month=7, day=15) <= cell < PartialDateTime(month=8, day=25)) >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange) ... - >>> print(within_st_swithuns.coord('time')) - DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, - 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, - 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, - 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00, - 2009-08-17 00:00:00, 2009-08-24 00:00:00], standard_name='time', calendar='gregorian') + >>> # Note: using summary(max_values) to show more of the points + >>> print(within_st_swithuns.coord('time').summary(max_values=100)) + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, + 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, + 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, + 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00, + 2009-08-17 00:00:00, 2009-08-24 00:00:00] + shape: (17,) + dtype: int64 + standard_name: 'time' Notice how the dates printed are between the range specified in the ``st_swithuns_daterange`` and that they span multiple years. diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst index 74b47b258e1..c5924a61c65 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/userguide/navigating_a_cube.rst @@ -33,9 +33,9 @@ We have already seen a basic string representation of a cube when printing: forecast_reference_time 2006-06-15 00:00:00 time 2006-06-15 00:00:00 Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s16i222 - source Data from Met Office Unified Model 6.01 + source 'Data from Met Office Unified Model 6.01' This representation is equivalent to passing the cube to the :func:`str` function. This function can be used on @@ -169,9 +169,9 @@ We can add and remove coordinates via :func:`Cube.add_dim_coord>> # Save a cube list to a PP file, appending to the contents of the file >>> # if it already exists >>> iris.save(cubes, "myfile.pp", append=True) + >>> # Save a cube to netCDF, defaults to NETCDF4 file format >>> iris.save(cubes[0], "myfile.nc") >>> # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option @@ -73,6 +74,12 @@ See for more details on supported arguments for the individual savers. +.. note:: + + The existence of a keyword argument for one saver does not guarantee the + same works for all savers. 
For example, it isn't possible to pass an + ``append`` keyword argument to the netCDF saver (see :ref:`netcdf_save`). + Customising the Save Process ---------------------------- @@ -102,6 +109,7 @@ Similarly a PP field may need to be written out with a specific value for LBEXP. yield field iris.fileformats.pp.save_fields(tweaked_fields(cubes[0]), '/tmp/app.pp') +.. _netcdf_save: NetCDF ^^^^^^ diff --git a/docs/src/userguide/subsetting_a_cube.rst b/docs/src/userguide/subsetting_a_cube.rst index 1c68cafb8d5..5112d9689ad 100644 --- a/docs/src/userguide/subsetting_a_cube.rst +++ b/docs/src/userguide/subsetting_a_cube.rst @@ -30,7 +30,7 @@ A subset of a cube can be "extracted" from a multi-dimensional cube in order to Scalar coordinates: grid_latitude 0.0 degrees Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' In this example we start with a 3 dimensional cube, with dimensions of ``height``, ``grid_latitude`` and ``grid_longitude``, @@ -97,8 +97,8 @@ same way as loading with constraints: time 2009-11-19 10:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Cube Iteration diff --git a/docs/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst index 858f985ec6e..989198296ce 100644 --- a/docs/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -182,8 +182,7 @@ Cubes With no Vertical Coord can now be Exported to GRIB -------------------------------------------------------- Iris can now export cubes with no vertical coord to GRIB. -The solution is still under discussion: See -https://github.com/SciTools/iris/issues/519. +The solution is still under discussion: See :issue:`519`. .. _simple_cfg: diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 77458c70e93..771a6029542 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -35,8 +35,8 @@ This document explains the changes made to Iris for this release :ref:`incompatible changes ` and :ref:`deprecations `. - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! v3.0.1 (27 Jan 2021) @@ -617,7 +617,6 @@ v3.0.4 (22 July 2021) .. _xxHash: https://github.com/Cyan4973/xxHash .. _PyKE: https://pypi.org/project/scitools-pyke/ .. _@owena11: https://github.com/owena11 -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _readthedocs: https://readthedocs.org/ .. _CF Conventions and Metadata: https://cfconventions.org/ .. _flake8: https://flake8.pycqa.org/en/stable/ diff --git a/docs/src/whatsnew/3.1.rst b/docs/src/whatsnew/3.1.rst index 165e20d9bc2..bd046a0a24c 100644 --- a/docs/src/whatsnew/3.1.rst +++ b/docs/src/whatsnew/3.1.rst @@ -25,8 +25,8 @@ This document explains the changes made to Iris for this release * Multiple improvements to developer guide documentation. See entries in the :ref:`"Documentation" section `, below. - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! 📢 Announcements @@ -315,7 +315,6 @@ This document explains the changes made to Iris for this release .. _blacken-docs: https://github.com/asottile/blacken-docs .. _conda-lock: https://github.com/conda-incubator/conda-lock .. 
_deprecated numpy 1.20 aliases for builtin types: https://numpy.org/doc/1.20/release/1.20.0-notes.html#using-the-aliases-of-builtin-types-like-np-int-is-deprecated -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _Met Office: https://www.metoffice.gov.uk/ .. _numpy: https://numpy.org/doc/stable/release/1.20.0-notes.html .. |pre-commit.ci| image:: https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg diff --git a/docs/src/whatsnew/3.2.rst b/docs/src/whatsnew/3.2.rst new file mode 100644 index 00000000000..ef3764daa5a --- /dev/null +++ b/docs/src/whatsnew/3.2.rst @@ -0,0 +1,384 @@ +.. include:: ../common_links.inc + +v3.2 (15 Feb 2022) +****************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`report` v3.2.0 Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this minor release of Iris include: + + * We've added experimental support for + :ref:`Meshes `, which can now be loaded and + attached to a cube. Mesh support is based on the `CF-UGRID`_ model. + * We've also dropped support for ``Python 3.7``. + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and + `@aaronspring`_ who made their first contributions to Iris. The first of + many we hope! +#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 + + +✨ Features +=========== + +#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added + support for :ref:`unstructured meshes `. This involved + adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, + :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and + supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. + Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key + objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, + :class:`iris.experimental.ugrid.mesh.MeshCoord` and + :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. + A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID + type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that + reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use + on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the + property :attr:`~iris.cube.Cube.mesh` which returns a + :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the + :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + +#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, + for files using the `CF-UGRID`_ conventions. + The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` + provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be + returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. + (:pull:`4058`). + +#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf + files, using the `CF-UGRID`_ conventions. + The existing :meth:`iris.save` function now does this, when saving cubes with meshes. 
+ A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving
+ :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data
+ (i.e. not attached to cubes).
+ (:pull:`4318` and :pull:`4339`).
+
+#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`
+ for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an
+ appropriate collection of :class:`iris.coords.Coord`\ s.
+
+#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries
+ containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`)
+
+#. `@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to
+ NetCDF files (was previously just :class:`~iris.cube.Cube`
+ :attr:`~iris.cube.Cube.data`). This is
+ important given the much greater size of
+ :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and
+ :class:`~iris.experimental.ugrid.mesh.Connectivity`
+ :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the
+ :ref:`mesh model `. (:pull:`4375`)
+
+#. `@bsherratt`_ added a ``threshold`` parameter to
+ :meth:`~iris.cube.Cube.intersection` (:pull:`4363`)
+
+#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to
+ benchmark scripts. Also added a regridding benchmark that uses this data
+ (:pull:`4402`)
+
+#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021).
+ (:issue:`4479`, :pull:`4483`)
+
+#. `@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath`
+ in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`,
+ :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`).
+ Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest
+ of Iris (:issue:`4523`).
+
+#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations
+ from `Metarelate`_. From now on, we intend to manage phenomenon translation
+ in Iris itself. (:pull:`4484`)
+
+#. `@pp-mo`_ improved printout of various cube data component objects:
+ :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`,
+ :class:`~iris.coords.AncillaryVariable`,
+ :class:`~iris.experimental.ugrid.mesh.MeshCoord` and
+ :class:`~iris.experimental.ugrid.mesh.Mesh`.
+ These now all provide a more controllable ``summary()`` method, and
+ more convenient and readable ``str()`` and ``repr()`` output in the style of
+ the :class:`iris.cube.Cube`.
+ They also no longer realise lazy data. (:pull:`4499`).
+
+
+🐛 Bugs Fixed
+=============
+
+#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where
+ one cell's bounds align with the requested maximum and negative minimum, fixing
+ :issue:`4221`. (:pull:`4278`)
+
+#. `@bsherratt`_ fixed further edge cases in
+ :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`)
+
+#. `@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube`
+ when a cube list contains cubes with different names, which will no longer report
+ "Cube names differ: var1 != var1" if var1 appears multiple times in the list
+ (:issue:`4342`, :pull:`4345`)
+
+#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeogCS` to handle spherical ellipsoid
+ parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`)
+
+#. `@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in
+ :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate
+ means for DJF (:pull:`4391`)
+
+#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate`
+ to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata
+ (:issue:`4096`, :pull:`4387`)
+
+#. `@bsherratt`_ fixed a regression in the NAME file loader introduced in 3.0.4,
+ as well as some long-standing bugs with vertical coordinates and number
+ formats. (:pull:`4411`)
+
+#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to always return ``None`` if
+ no value match is found. (:pull:`4417`)
+
+#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning
+ when applied to a single point (:issue:`4250`, :pull:`4367`)
+
+#. `@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and
+ :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve
+ array laziness, allowing efficient comparisons even with larger-than-memory
+ objects. (:pull:`4439`)
+
+#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new
+ coordinate bounds using minimum and maximum for unordered coordinates,
+ fixing :issue:`1528`. (:pull:`4315`)
+
+#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube
+ so that a cube with lazy data awaiting a unit conversion can be pickled.
+ (:issue:`4354`, :pull:`4377`)
+
+#. `@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate
+ was mistakenly interpreted as a latitude, usually resulting in two 'latitude's
+ instead of one latitude and one longitude.
+ (:issue:`4460`, :pull:`4470`)
+
+#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection`
+ from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`)
+
+#. `@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215``
+ to indicate cloud fraction greater than 7.9 oktas, rather than 7.5
+ (:issue:`3305`, :pull:`4535`)
+
+#. `@lbdreyer`_ fixed a bug in :func:`iris.io.load_http` which was missing an import
+ (:pull:`4580`)
+
+
+💣 Incompatible Changes
+=======================
+
+#. N/A
+
+
+🚀 Performance Enhancements
+===========================
+
+#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy
+ data to take significantly longer than with real data. Benchmark
+ :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease
+ from >10s to 625ms. (:issue:`4280`, :pull:`4400`)
+
+#. `@bjlittle`_ included an optimisation to :meth:`~iris.cube.Cube.coord_dims`
+ to avoid unnecessary processing whenever a coordinate instance that already
+ exists within the cube is provided. (:pull:`4549`)
+
+
+🔥 Deprecations
+===============
+
+#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0``
+ the experimental ``equalise_attributes`` functionality was moved to the
+ :func:`iris.util.equalise_attributes` function. Since then, calling the
+ :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised
+ an exception. (:issue:`3528`, :pull:`4496`)
+
+#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for
+ :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be
+ removed in a future release of Iris. (:pull:`4514`)
+
+#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not
+ believed to still be in use. The deprecation warnings invite users to contact
+ the Iris Developers if this isn't the case. (:pull:`4525`)
+
+#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and
+ :mod:`iris.fileformats.dot` as they are not believed to still be in use. The
+ deprecation warnings invite users to contact the Iris Developers if this
+ isn't the case. (:pull:`4515`)
+
+#. `@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function,
+ which was deprecated in ``v3.0``. Instead use
+ :class:`iris.common.resolve.Resolve`. For example, rather than calling
+ ``as_compatible_shape(src_cube, target_cube)`` replace with
+ ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`)
+
+#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in
+ preference for :meth:`iris.cube.CubeList.extract_overlapping`. The
+ :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in
+ a future release of Iris. (:pull:`4541`)
+
+#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is
+ now replaced by `iris-esmf-regrid`_. (:pull:`4551`)
+
+#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`.
+ Most features have a preferred exact alternative, as suggested, *except*
+ :class:`iris.experimental.regrid.ProjectedUnstructuredLinear`: that has no
+ identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is
+ suggested as being quite close (though possibly slower). (:pull:`4548`)
+
+
+🔗 Dependencies
+===============
+
+#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin.
+ (:pull:`4331`)
+
+#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3``
+ minimum pins. (:pull:`4356`)
+
+#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in
+ accordance with `NEP-29`_ deprecation policy. (:pull:`4386`)
+
+#. `@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_
+ backwards compatibility and deprecation policy schedule. (:pull:`4481`)
+
+
+📚 Documentation
+================
+
+#. `@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery
+ example. (:pull:`4120`)
+
+#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in
+ :ref:`get involved `. (:pull:`4307`)
+
+#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation
+ section `. (:pull:`4314`)
+
+#. `@wjbenfold`_ added explanation about the absence of | operator for
+ :class:`iris.Constraint` to :ref:`userguide loading section
+ ` and to api reference documentation. (:pull:`4321`)
+
+#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available
+ during :ref:`developer_running_tests`. (:pull:`4359`)
+
+#. `@lbdreyer`_ added a section to the release documentation outlining the role
+ of the :ref:`release_manager`. (:pull:`4413`)
+
+#. `@trexfeathers`_ encouraged contributors to include type hinting in code
+ they are working on - :ref:`code_formatting`. (:pull:`4390`)
+
+#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed
+ :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`)
+
+#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide
+ loading section `. (:pull:`4462`)
+
+#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_.
+ (:pull:`4476`)
+
+#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to
+ contributing to the docs ` to the docs.
+ (:pull:`4461`)
+
+#. `@pp-mo`_ improved and corrected docstrings of
+ :class:`iris.analysis.PointInCell`, making it clear what is the actual
+ calculation performed. (:pull:`4548`)
+
+#. `@pp-mo`_ removed reference in docstring of
+ :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated)
+ :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`.
+ (:pull:`4548`)
+
+
+💼 Internal
+===========
+
+#. `@trexfeathers`_ set the linkcheck to ignore
+ http://www.nationalarchives.gov.uk/doc/open-government-licence since this
+ always works locally, but never within CI. (:pull:`4307`)
+
+#. `@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if
+ test data is missing (:pull:`4319`)
+
+#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being
+ marked stale. (:pull:`4317`)
+
+#. `@tkknight`_ added additional make targets for reducing the time of the
+ documentation build including ``html-noapi`` and ``html-quick``.
+ Useful for development purposes only. For more information see
+ :ref:`contributing.documentation.building` the documentation. (:pull:`4333`)
+
+#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning
+ that sometimes interferes with unrelated tests. (:pull:`4330`)
+
+#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`.
+ (:pull:`4349`)
+
+#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules.
+ (:pull:`4347`).
+
+#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically
+ sort ``__all__`` entries into alphabetical order. (:pull:`4353`)
+
+#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy
+ deprecation warning. (:issue:`4374`, :pull:`4376`)
+
+#. `@akuhnregnier`_ removed addition of period from
+ :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests
+ using ``assertArrayAllClose`` following :issue:`3993`.
+ (:pull:`4421`)
+
+#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`)
+
+#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`)
+
+#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`)
+
+#. `@aaronspring`_ exchanged ``dask`` with
+ ``dask-core`` in testing environments, reducing the number of dependencies
+ installed for testing. (:pull:`4434`)
+
+#. `@wjbenfold`_ prevented github action runs in forks (:issue:`4441`,
+ :pull:`4444`)
+
+#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by
+ nose (:issue:`4431`, :pull:`4450`)
+
+.. comment
+ Whatsnew author names (@github name) in alphabetical order. Note that,
+ core dev names are automatically included by the common_links.inc:
+
+.. _@aaronspring: https://github.com/aaronspring
+.. _@akuhnregnier: https://github.com/akuhnregnier
+.. _@bsherratt: https://github.com/bsherratt
+.. _@larsbarring: https://github.com/larsbarring
+.. _@pdearnshaw: https://github.com/pdearnshaw
+.. _@SimonPeatman: https://github.com/SimonPeatman
+.. _@tinyendian: https://github.com/tinyendian
+
+.. comment
+ Whatsnew resources in alphabetical order:
+
+.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html
+.. _Metarelate: http://www.metarelate.net/
+.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/
+.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid
+.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba
+.. 
_sort-all: https://github.com/aio-libs/sort-all diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/dev.rst new file mode 100644 index 00000000000..b9d5989bfc5 --- /dev/null +++ b/docs/src/whatsnew/dev.rst @@ -0,0 +1,94 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`report` |iris_version| Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this minor release of Iris include: + + * N/A + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. N/A + + +✨ Features +=========== + +#. `@wjbenfold`_ added support for ``false_easting`` and ``false_northing`` to + :class:`~iris.coord_system.Mercator`. (:issue:`3107`, :pull:`4524`) + + +🐛 Bugs Fixed +============= + +#. `@rcomer`_ reverted part of the change from :pull:`3906` so that + :func:`iris.plot.plot` no longer defaults to placing a "Y" coordinate (e.g. + latitude) on the y-axis of the plot. (:issue:`4493`, :pull:`4601`) + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. `@rcomer`_ introduced the ``nc-time-axis >=1.4`` minimum pin, reflecting that + we no longer use the deprecated :class:`nc_time_axis.CalendarDateTime` + when plotting against time coordinates. (:pull:`4584`) + + +📚 Documentation +================ + +#. N/A + + +💼 Internal +=========== + +#. N/A + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + + + + +.. comment + Whatsnew resources in alphabetical order: + + diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/dev.rst.template similarity index 83% rename from docs/src/whatsnew/latest.rst.template rename to docs/src/whatsnew/dev.rst.template index ced07780692..1b36d3f0b01 100644 --- a/docs/src/whatsnew/latest.rst.template +++ b/docs/src/whatsnew/dev.rst.template @@ -18,13 +18,13 @@ This document explains the changes made to Iris for this release * N/A - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! NOTE: section below is a template for bugfix patches ==================================================== - (Please remove this section when creating an initial 'latest.rst') + (Please remove this section when creating an initial 'dev.rst') v3.X.X (DD MMM YYYY) ==================== @@ -41,7 +41,7 @@ v3.X.X (DD MMM YYYY) NOTE: section above is a template for bugfix patches ==================================================== - (Please remove this section when creating an initial 'latest.rst') + (Please remove this section when creating an initial 'dev.rst') @@ -109,4 +109,4 @@ NOTE: section above is a template for bugfix patches .. comment Whatsnew resources in alphabetical order: -.. 
_GitHub: https://github.com/SciTools/iris/issues/new/choose + diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index fabb0564843..7e0829da5b5 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -10,7 +10,8 @@ Iris versions. .. toctree:: :maxdepth: 1 - latest.rst + dev.rst + 3.2.rst 3.1.rst 3.0.rst 2.4.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst deleted file mode 100644 index 2787e0044f4..00000000000 --- a/docs/src/whatsnew/latest.rst +++ /dev/null @@ -1,289 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. dropdown:: :opticon:`report` |iris_version| Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light - :animate: fade-in - :open: - - The highlights for this minor release of Iris include: - - * We've added support for `UGRID`_ meshes which can now be loaded and attached - to a cube. - - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! - - -📢 Announcements -================ - -#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@akuhnregnier`_, - `@bsherratt`_ and `@aaronspring`_ who made their first contributions to Iris. - The first of many we hope! -#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 - - -✨ Features -=========== - -#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added - support for unstructured meshes, as described by `UGRID`_. This involved - adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, - :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and - supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. - Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key - objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, - :class:`iris.experimental.ugrid.mesh.MeshCoord` and - :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. - A :class:`iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID - type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that - reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use - on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the - property :attr:`~iris.cube.Cube.mesh` which returns a - :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the - :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. - -#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, - for files using the `UGRID`_ conventions. - The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` - provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be - returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. - (:pull:`4058`). - -#. `@pp-mo`_ added support to save cubes with meshes to netcdf files, using the - `UGRID`_ conventions. - The existing :meth:`iris.save` function now does this, when saving cubes with meshes. - A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving - :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data - (i.e. not attached to cubes). 
- (:pull:`4318` and :pull:`4339`). - -#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords` - for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an - appropriate collection of :class:`iris.coords.Coord`\ s. - -#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries - containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`) - -#. `@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to - NetCDF files (was previously just :class:`~iris.cube.Cube` - :attr:`~iris.cube.Cube.data`). This is - important given the much greater size of - :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and - :class:`~iris.experimental.ugrid.mesh.Connectivity` - :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the - `UGRID`_ model. (:pull:`4375`) - -#. `@bsherratt`_ added a `threshold` parameter to - :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) - -#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to - benchmark scripts. Also added a regridding benchmark that uses this data - (:pull:`4402`) - - -🐛 Bugs Fixed -============= - -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where - one cell's bounds align with the requested maximum and negative minimum, fixing - :issue:`4221`. (:pull:`4278`) - -#. `@bsherratt`_ fixed further edge cases in - :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`) - -#. `@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube` - when a cube list contains cubes with different names, which will no longer report - "Cube names differ: var1 != var1" if var1 appears multiple times in the list - (:issue:`4342`, :pull:`4345`) - -#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeoCS` to handle spherical ellipsoid - parameter inverse_flattening = 0 (:issue: `4146`, :pull:`4348`) - -#. `@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in - :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate - means for DJF (:pull:`4391`) - -#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate` - to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata - (:issue:`4096`, :pull:`4387`) - -#. `@bsherratt`_ fixed a regression to the NAME file loader introduced in 3.0.4, - as well as some long-standing bugs with vertical coordinates and number - formats. (:pull:`4411`) - -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to alway return ``None`` if - no value match is found. (:pull:`4417`) - -#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning - when applied to a single point (:issue:`4250`, :pull:`4367`) - -#. `@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and - :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve - array laziness, allowing efficient comparisons even with larger-than-memory - objects. (:pull:`4439`) - -#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new - coordinate bounds using minimum and maximum for unordered coordinates, - fixing :issue:`1528`. (:pull:`4315`) - -#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube - so that a cube with lazy data awaiting a unit conversion can be pickled. 
- (:issue:`4354 `, :pull:`4377`) - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy - data to take significantly longer than with real data. Benchmark - :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease - from >10s to 625ms. (:issue:`4280`, :pull:`4400`) - - -🔥 Deprecations -=============== - -#. N/A - - -🔗 Dependencies -=============== - -#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin. - (:pull:`4331`) - -#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3`` - minimum pins. (:pull:`4356`) - -#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in - accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) - - -📚 Documentation -================ - -#. `@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery - example. (:pull:`4120`) - -#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in - :ref:`get involved `. (:pull:`4307`) - -#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation - section `. (:pull:`4314`) - -#. `@wjbenfold`_ added explanation about the absence of | operator for - :class:`iris.Constraint` to :ref:`userguide loading section - ` and to api reference documentation. (:pull:`4321`) - -#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available - during :ref:`developer_running_tests`. (:pull:`4359`) - -#. `@lbdreyer`_ added a section to the release documentation outlining the role - of the :ref:`release_manager`. (:pull:`4413`) - -#. `@trexfeathers`_ encouraged contributors to include type hinting in code - they are working on - :ref:`code_formatting`. (:pull:`4390`) - -#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed - :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`) - -#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide - loading section `. (:pull:`4462`) - -#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_. - (:pull:`4476`) - -#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to - contributing to the docs ` to the docs. - (:pull:`4461`) - - -💼 Internal -=========== - -#. `@trexfeathers`_ set the linkcheck to ignore - http://www.nationalarchives.gov.uk/doc/open-government-licence since this - always works locally, but never within CI. (:pull:`4307`) - -#. `@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if - test data is missing (:pull:`4319`) - -#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being - marked stale. (:pull:`4317`) - -#. `@tkknight`_ added additional make targets for reducing the time of the - documentation build including ``html-noapi`` and ``html-quick``. - Useful for development purposes only. For more information see - :ref:`contributing.documentation.building` the documentation. (:pull:`4333`) - -#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning - that sometimes interferes with unrelated tests. (:pull:`4330`) - -#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. - (:pull:`4349`) - -#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. - (:pull:`4347`). - -#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically - sort ``__all__`` entries into alphabetical order. (:pull:`4353`) - -#. 
- - -💼 Internal -=========== - -#. `@trexfeathers`_ set the linkcheck to ignore - http://www.nationalarchives.gov.uk/doc/open-government-licence since this - always works locally, but never within CI. (:pull:`4307`) - -#. `@wjbenfold`_ made the netCDF integration tests skip ``TestConstrainedLoad`` if - test data is missing (:pull:`4319`) - -#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being - marked stale. (:pull:`4317`) - -#. `@tkknight`_ added additional make targets for reducing the time of the - documentation build, including ``html-noapi`` and ``html-quick``. - Useful for development purposes only. For more information see - :ref:`contributing.documentation.building` in the documentation. (:pull:`4333`) - -#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning - that sometimes interferes with unrelated tests. (:pull:`4330`) - -#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. - (:pull:`4349`) - -#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. - (:pull:`4347`). - -#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically - sort ``__all__`` entries into alphabetical order. (:pull:`4353`) - -#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy - deprecation warning. (:issue:`4374`, :pull:`4376`) - -#. `@akuhnregnier`_ removed the addition of the period from - :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests - using assertArrayAllClose, following :issue:`3993`. - (:pull:`4421`) - -#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) - -#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`) - -#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) - -#. `@aaronspring`_ replaced `dask` with - `dask-core` in testing environments, reducing the number of dependencies - installed for testing. (:pull:`4434`) - -#. `@wjbenfold`_ prevented GitHub Action runs in forks (:issue:`4441`, - :pull:`4444`) - -#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by - nose (:issue:`4431`, :pull:`4450`) - -.. comment - Whatsnew author names (@github name) in alphabetical order. Note that - core dev names are automatically included by the common_links.inc: - -.. _@aaronspring: https://github.com/aaronspring -.. _@akuhnregnier: https://github.com/akuhnregnier -.. _@bsherratt: https://github.com/bsherratt -.. _@larsbarring: https://github.com/larsbarring -.. _@pdearnshaw: https://github.com/pdearnshaw -.. _@tinyendian: https://github.com/tinyendian - -.. comment - Whatsnew resources in alphabetical order: - -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose -.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ -.. _sort-all: https://github.com/aio-libs/sort-all -.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst new file mode 120000 index 00000000000..56aebe92dd4 --- /dev/null +++ b/docs/src/whatsnew/latest.rst @@ -0,0 +1 @@ +dev.rst \ No newline at end of file diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index 5a19f8d5b1b..bd761681927 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,7 +1,7 @@ - 77 - 2021-01-19T13:38:50Z + 78 + 2021-09-21T11:55:06Z Centre for Environmental Data Analysis support@ceda.ac.uk @@ -489,6 +489,13 @@ + + m2 s-2 + + + One-half the scalar product of the air velocity and vorticity vectors, where vorticity refers to the standard name atmosphere_upward_absolute_vorticity. Helicity is proportional to the strength of the flow, the amount of vertical wind shear, and the amount of turning in the flow. + + m2 s-1 35 @@ -2467,7 +2474,7 @@ 1 - The "beam_consistency_indicator" is the degree to which the magnitudes of a collection (ensemble) of acoustic signals from multiple underwater acoustic transceivers relate to each other. It is used as a data quality assessment parameter in ADCP (acoustic doppler current profiler) instruments and is frequently referred to as "correlation magnitude". Convention is that the larger the value, the higher the signal to noise ratio and therefore the better the quality of the current vector measurements; the maximum value of the indicator is 128. + The "beam_consistency_indicator" is the degree to which the received acoustic pulse is correlated with the transmitted pulse. It is used as a data quality assessment parameter in ADCP (acoustic doppler current profiler) instruments and is frequently referred to as "correlation magnitude".
Convention is that the larger the value, the higher the signal to noise ratio and therefore the better the quality of the current vector measurements; the maximum value of the indicator is 128. @@ -2491,11 +2498,11 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. "Bedrock" is the solid Earth surface beneath land ice, ocean water or soil. The zero of bedrock altitude change is arbitrary. Isostatic adjustment is the vertical movement of the lithosphere due to changing surface ice and water loads. - + - "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. The quantity with standard name biological_taxon_identifier is the machine-readable identifier for the taxon registration in either WoRMS (the AphiaID) or ITIS (the taxonomic serial number or TSN), including namespace. The namespace strings are 'aphia:' or 'tsn:'. For example, Calanus finmarchicus is encoded as either 'aphia:104464' or 'tsn:85272'. For the marine domain WoRMS has more complete coverage and so aphia Ids are preferred. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. The quantity with standard name biological_taxon_lsid is the machine-readable identifier based on a taxon registration system using the syntax convention specified for the Life Science Identifier (LSID) - urn:lsid:<Authority>:<Namespace>:<ObjectID>[:<Version>]. This includes the reference classification in the element and these are restricted by the LSID governance. It is strongly recommended in CF that the authority chosen is World Register of Marine Species (WoRMS) for oceanographic data and Integrated Taxonomic Information System (ITIS) for freshwater and terrestrial data. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. This identifier is a narrower equivalent to the scientificNameID field in the Darwin Core Standard. @@ -2687,6 +2694,13 @@ "Amount" means mass per unit area. Zero change in land ice amount is an arbitrary level. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. + + kg + + + Zero change in land ice mass is an arbitrary level. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The horizontal domain over which the quantity is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. + + kg m-2 @@ -2922,7 +2936,7 @@ m-3 - "Colony forming unit" means an estimate of the viable bacterial or fungal numbers determined by counting colonies grown from a sample. "Number concentration" means the number of particles or other specified objects per unit volume. 
"Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Colony forming unit" means an estimate of the viable bacterial or fungal numbers determined by counting colonies grown from a sample. "Number concentration" means the number of particles or other specified objects per unit volume. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -3079,6 +3093,13 @@ Depth is the vertical distance below the surface. + + m + + + The phrase depth_at_base_of_unfrozen_ground is the instantaneous depth of the downward penetration of thaw from the ground surface at a given time. Permafrost is soil or rock that has remained at a temperature at or below zero degrees Celsius throughout the seasonal cycle for two or more consecutive years. The maximum measurable depth_at_base_of_unfrozen_ground value as recorded at the end of a thawing season corresponds to the permafrost_active_layer_thickness. + + m @@ -3142,6 +3163,13 @@ "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". + + K + + + Sea surface temperature is usually abbreviated as "SST". It is the temperature of sea water near the surface (including the part under sea-ice, if any), not the skin or interface temperature, whose standard names are sea_surface_skin_temperature and surface_temperature, respectively. For the temperature of sea water at a particular depth or layer, a data variable of "sea_water_temperature" with a vertical coordinate axis should be used. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. + + Pa @@ -3723,6 +3751,13 @@ A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Flood water is water that covers land which is normally not covered by water. + + m s-1 + + + A velocity is a vector quantity. 
"Eastward" indicates a vector component which is positive when directed eastward (negative westward). Friction velocity is a reference wind velocity derived from the relationship between air density and downward stress and is usually applied at a level close to the surface where stress is assumed to independent of height and approximately proportional to the square of mean velocity. + + m s-1 @@ -4577,6 +4612,13 @@ "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be "model_level_number", but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". + + 1 + + + ice_volume_in_frozen_ground_in_excess_of_pore_volume_in_unfrozen_ground_expressed_as_fraction_of_frozen_ground_volume represents the fractional amount of "excess ice" in frozen ground. Excess ice is the volume of ice in the ground which exceeds the total pore volume that the ground would have under natural unfrozen conditions. Due to the presence of ground ice, the total water content of a frozen soil may exceed that corresponding to its normally consolidated state when unfrozen. As a result, upon thawing, a soil containing excess ice will settle under its own weight until it attains its consolidated state. Reference: van Everdingen, R. O. editor 1998: Multi-language glossary of permafrost and related ground ice terms. International Permafrost Association. + + m3 s-1 @@ -4588,7 +4630,7 @@ m s-1 - Sea water velocity is a vector quantity that is the speed at which water travels in a specified direction. The "indicative error" is an estimate of the quality of a sea water velocity profile measured using an ADCP (acoustic doppler current profiler). It is determined by differencing duplicate error velocity measurements made using different pairs of beams. The parameter is frequently referred to as the "error velocity". + Sea water velocity is a vector quantity that is the speed at which water travels in a specified direction. The "indicative error" is an estimate of the quality of a sea water velocity profile measured using an ADCP (acoustic doppler current profiler). It is determined by the difference between the vertical velocity calculated from two 3-beam solutions. The parameter is frequently referred to as the "error velocity". @@ -7671,6 +7713,13 @@ "Content" indicates a quantity per unit area. + + J Kg-1 + + + The lightning_potential_index measures the potential for charge generation and separation that leads to lightning flashes in convective thunderstorms. It is derived from the model simulated grid-scale updraft velocity and the mass mixing-ratios of liquid water, cloud ice, snow, and graupel. 
+ + J @@ -8081,21 +8130,21 @@ kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as carbon is also referred to as "carbon biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as carbon is also referred to as "carbon biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. 
See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as nitrogen is also referred to as "nitrogen biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as nitrogen is also referred to as "nitrogen biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -9449,6 +9498,13 @@ Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally; all contain a chlorin ring which gives the green pigment and a side chain whose structure varies. Chlorophyll-a is the most commonly occurring form of natural chlorophyll. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -9610,6 +9666,13 @@ Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). Graupel consists of heavily rimed snow particles, often called snow pellets; often indistinguishable from very small soft hail except when the size convention that hail must have a diameter greater than 5 mm is adopted. Reference: American Meteorological Society Glossary http://glossary.ametsoc.org/wiki/Graupel. There are also separate standard names for hail. Standard names for "graupel_and_hail" should be used to describe data produced by models that do not distinguish between hail and graupel. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y'', where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -9918,6 +9981,13 @@ "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). 
A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. + + 1 @@ -10191,6 +10261,13 @@ The quantity with standard name mass_fraction_of_rainfall_falling_onto_surface_snow is the mass of rainfall falling onto snow as a fraction of the mass of rainfall falling within the area of interest. Surface snow refers to the snow on the solid ground or on surface ice cover, but excludes, for example, falling snowflakes and snow on plants. The surface called "surface" means the lower boundary of the atmosphere. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y'', where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -10219,6 +10296,13 @@ "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Shallow convective cloud is nonprecipitating cumulus cloud with a cloud top below 3000m above the surface produced by the convection schemes in an atmosphere model. Some atmosphere models differentiate between shallow and deep convection. "Cloud liquid water" refers to the liquid phase of cloud water. A diameter of 0.2 mm has been suggested as an upper limit to the size of drops that shall be regarded as cloud drops; larger drops fall rapidly enough so that only very strong updrafts can sustain them. Any such division is somewhat arbitrary, and active cumulus clouds sometimes contain cloud drops much larger than this. Reference: AMS Glossary http://glossary.ametsoc.org/wiki/Cloud_drop. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y'', where X is a material constituent of Y. 
It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -10755,14 +10839,14 @@ mol m-3 - "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. mol m-3 - "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. 
There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -12648,6 +12732,13 @@ The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of CFC11 is CFCl3. The IUPAC name for CFC11 is trichloro(fluoro)methane. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved inorganic carbon" describes a family of chemical species in solution, including carbon dioxide, carbonic acid and the carbonate and bicarbonate anions. "Dissolved inorganic carbon" is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + mol kg-1 @@ -13054,6 +13145,13 @@ A velocity is a vector quantity. "Northward" indicates a vector component which is positive when directed northward (negative southward). Flood water is water that covers land which is normally not covered by water. + + m s-1 + + + A velocity is a vector quantity. "Northward" indicates a vector component which is positive when directed northward (negative southward). 
Friction velocity is a reference wind velocity derived from the relationship between air density and downward stress and is usually applied at a level close to the surface where stress is assumed to be independent of height and approximately proportional to the square of mean velocity. + + W m-2 @@ -13345,7 +13443,7 @@ m-3 - "Number concentration" means the number of particles or other specified objects per unit volume. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Number concentration" means the number of particles or other specified objects per unit volume. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -13436,21 +13534,21 @@ 1 - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". 1 - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s).
It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". 1 - The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". @@ -13464,7 +13562,7 @@ 1 - Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". 
+ Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". @@ -17359,6 +17457,13 @@ The "reaction rate" is the rate at which the reactants of a chemical reaction form the products. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The rate of "hydroxyl radical destruction due to reaction with nmvoc" is the nmvoc reactivity with regard to reactions with OH. It is the weighted sum of the reactivity of all individual nmvoc species with OH. The chemical formula for the hydroxyl radical is OH. In chemistry, a "radical" is a highly reactive, and therefore short lived, species. The abbreviation "nmvoc" means non methane volatile organic compounds; "nmvoc" is the term used in standard names to describe the group of chemical species having this classification that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + 1 + + + The phrase "ratio_of_X_to_Y" means X/Y. "X_volume" means the volume occupied by X within the grid cell. Pore volume is the volume of the porosity of the ground under natural, unfrozen conditions. This is often known as "ice saturation index". + + K s-1 @@ -17391,7 +17496,7 @@ 1 - Realization is used to label a dimension that can be thought of asa statistical sample, e.g., labelling members of a model ensemble. + Realization is used to label a dimension that can be thought of as a statistical sample, e.g., labelling members of a model ensemble. @@ -17597,6 +17702,13 @@ The sea_floor_depth_below_sea_surface is the vertical distance between the sea surface and the seabed as measured at a given point in space including the variance caused by tides and possibly waves. + + m + + + The average size of grains (also known as particles) in a sediment sample. + + 1 @@ -17681,6 +17793,13 @@ The term sea_ice_extent means the total area of all grid cells in which the sea ice area fraction equals or exceeds a threshold, often chosen to be 15 per cent. The threshold must be specified by supplying a coordinate variable or scalar coordinate variable with the standard name of sea_ice_area_fraction. The horizontal domain over which sea ice extent is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. 
"Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + + m + + + "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. An ice floe is a flat expanse of sea ice, generally taken to be less than 10 km across. ice_floe_diameter corresponds to the diameter of a circle with the same area as the ice floe. + + m @@ -17856,6 +17975,20 @@ Sea surface density is the density of sea water near the surface (including the part under sea-ice, if any). + + Pa + + + The surface called "sea surface" means the upper boundary of the ocean. "Surface stress" means the shear stress (force per unit area) exerted at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, surface stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed northward (negative southward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "dissipation_of_sea_surface_waves" means the stress associated with sea surface waves dissipation processes such as whitecapping. + + + + Pa + + + The surface called "sea surface" means the upper boundary of the ocean. "Surface stress" means the shear stress (force per unit area) exerted at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, surface stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "dissipation_of_sea_surface_waves" means the stress associated with sea surface waves dissipation processes such as whitecapping. + + K @@ -18367,6 +18500,13 @@ Wave slope describes an aspect of sea surface wave geometry related to sea surface roughness. Mean square slope describes a derivation over multiple waves within a sea-state, for example calculated from moments of the wave directional spectrum. The phrase "y_slope" indicates that slope values are derived from vector components along the grid y-axis. + + m-1 + + + The wave directional spectrum can be written as a five dimensional function S(t,x,y,k,theta) where t is time, x and y are horizontal coordinates (such as longitude and latitude), k is wavenumber and theta is direction. S has the standard name sea_surface_wave_directional_variance_spectral_density. 
S can be integrated over direction to give S1= integral(S dtheta) and this quantity has the standard name sea_surface_wave_variance_spectral_density. Wavenumber is the number of oscillations of a wave per unit distance. Wavenumber moments, M(n) of S1 can then be calculated as follows: M(n) = integral(S1 k^n dk), where k^n is k to the power of n. The mean wavenumber, k(1), is calculated as the ratio M(1)/M(0). + + s @@ -18451,6 +18591,27 @@ Sea surface wave variance spectral density is the variance of wave amplitude within a range of wave frequency. + + Pa + + + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xx" indicates the component of the tensor along the grid x_ axis. + + + + Pa + + + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xy" indicates the lateral contributions to x_ and y_ components of the tensor. + + + + Pa + + + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "yy" indicates the component of the tensor along the grid y_ axis. + + degree @@ -19088,6 +19249,13 @@ "Content" indicates a quantity per unit area. The "soil content" of a quantity refers to the vertical integral from the surface down to the bottom of the soil model. For the content between specified levels in the soil, standard names including content_of_soil_layer are used. Soil carbon is returned to the atmosphere as the organic matter decays. The decay process takes varying amounts of time depending on the composition of the organic matter, the temperature and the availability of moisture. A carbon "soil pool" means the carbon contained in organic matter which has a characteristic period over which it decays and releases carbon into the atmosphere. "Slow soil pool" refers to the decay of organic matter in soil with a characteristic period of more than a hundred years under reference climate conditions of a temperature of 20 degrees Celsius and no water limitations. + + 1 + + + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Snow "viewable from above" refers to the snow on objects or the ground as viewed from above, which excludes, for example, falling snow flakes and snow obscured by a canopy, vegetative cover, or other features resting on the surface. + + kg m-2 @@ -19225,7 +19393,7 @@ K 85 - Soil temperature is the bulk temperature of the soil, not the surface (skin) temperature. "Soil" means the near-surface layer where plants sink their roots. For subsurface temperatures that extend beneath the soil layer or in areas where there is no surface soil layer, the standard name solid_earth_subsurface_temperature should be used. + Soil temperature is the bulk temperature of the soil, not the surface (skin) temperature. "Soil" means the near-surface layer where plants sink their roots. For subsurface temperatures that extend beneath the soil layer or in areas where there is no surface soil layer, the standard name temperature_in_ground should be used. 
@@ -19249,6 +19417,13 @@ A variable with the standard name of soil_type contains strings which indicate the character of the soil e.g. clay. These strings have not yet been standardised. Alternatively, the data variable may contain integers which can be translated to strings using flag_values and flag_meanings attributes. + + 1 + + + soil_water_ph is the measure of acidity of soil moisture, defined as the negative logarithm of the concentration of dissolved hydrogen ions in soil water. + + degree @@ -19284,13 +19459,6 @@ Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. - - K - - - The quantity with standard name solid_earth_subsurface_temperature is the temperature at any depth (or in a layer) of the "solid" earth, excluding surficial snow and ice (but not permafrost or soil). For temperatures in surface lying snow and ice, the more specific standard names temperature_in_surface_snow and land_ice_temperature should be used. For temperatures measured or modelled specifically in the soil layer (the near-surface layer where plants sink their roots) the standard name soil_temperature should be used. - - kg m-2 s-1 @@ -19410,6 +19578,13 @@ "specific" means per unit mass. Potential energy is the sum of the gravitational potential energy relative to the geoid and the centripetal potential energy. (The geopotential is the specific potential energy.) + + J kg-1 K-1 + + + Thermal capacity, or heat capacity, is the amount of heat energy required to increase the temperature of 1 kg of material by 1 K. It is a property of the material. + + J kg-1 K-1 @@ -19470,28 +19645,28 @@ day - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". day - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. 
A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". day - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". day - "Amount" means mass per unit area. 
"Precipitation" in the earth's atmosphere means precipitation of water in all phases.The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases.The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". @@ -19627,6 +19802,13 @@ "Sea surface height" is a time-varying quantity. The steric change in sea surface height is the change in height that a water column of standard temperature zero degrees Celsius and practical salinity S=35.0 would undergo when its temperature and salinity are changed to the observed values. The sum of the quantities with standard names thermosteric_change_in_sea_surface_height and halosteric_change_in_sea_surface_height is the total steric change in the water column height, which has the standard name of steric_change_in_sea_surface_height. The sum of the quantities with standard names sea_water_mass_per_unit_area_expressed_as_thickness and steric_change_in_sea_surface_height is the total thickness of the sea water column. + + m s-1 + + + Storm motion speed is defined as a two dimensional velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) It is defined as the average speed of a supercell, and the direction the storm will move from. It is not dependent on the orientation of the ground-relative winds. Storm motion speed generally follows the methodology outlined in Bunkers et al. (2000). + + 1 @@ -19928,6 +20110,20 @@ The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. 
"Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed northward (negative southward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Viscosity" means the stress associated with viscous effects at the sea surface and is equivalent to the turbulent stress just outside the viscous sublayer. + + + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed northward (negative southward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with form drag over sea surface waves. + + W m-2 @@ -20068,6 +20264,20 @@ The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. 
Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Viscosity" means the stress associated with viscous effects at the sea surface and is equivalent to the turbulent stress just outside the viscous sublayer. + + + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with form drag over sea surface waves. + + W m-2 @@ -23323,6 +23533,13 @@ The surface called "surface" means the lower boundary of the atmosphere. "anomaly" means difference from climatology. 
The surface temperature is the (skin) temperature at the interface, not the bulk temperature of the medium above or below. + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted at the surface. An upward stress is an upward flux of momentum into the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Upward eastward" indicates the ZX component of a tensor. An upward eastward stress is an upward flux of eastward momentum, which accelerates the upper medium eastward and the lower medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with oscillatory motions of a wavy sea surface. + + W m-2 @@ -23596,6 +23813,13 @@ The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for dimethyl sulfide is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS. + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted at the surface. An upward stress is an upward flux of momentum into the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Upward northward" indicates the ZY component of a tensor. An upward northward stress is an upward flux of northward momentum, which accelerates the upper medium northward and the lower medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with oscillatory motions of a wavy sea surface. + + W m-2 122 E146 @@ -23799,6 +24023,13 @@ The quantity with standard name temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water is the heat carried by the transfer of water into the liquid ocean by the process of runoff. This quantity additionally includes melt water from sea ice and icebergs. It is calculated relative to the heat that would be transported by runoff water entering the sea at zero degrees Celsius. It is calculated as the product Q_runoff * C_p * T_runoff, where Q_runoff is the mass flux of liquid runoff entering the sea water (kg m-2 s-1), C_p is the specific heat capacity of water, and T_runoff is the temperature in degrees Celsius of the runoff water. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Runoff is the liquid water which drains from land. 
If not specified, "runoff" refers to the sum of surface runoff and subsurface drainage. + + K + + + The temperature at any given depth (or in a layer) below the surface of the ground, excluding surficial snow and ice (but not permafrost or soil). For temperatures in surface lying snow and ice, the more specific standard names temperature_in_surface_snow and land_ice_temperature should be used. For temperatures measured or modelled specifically for the soil layer (the near-surface layer where plants sink their roots) the standard name soil_temperature should be used. + + K E238 @@ -28041,6 +28272,13 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. + + kg s-1 + + + The phrase "tendency_of_X" means derivative of X with respect to time. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The horizontal domain over which the quantity is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. + + kg s-1 @@ -29623,6 +29861,13 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. Speed is the magnitude of velocity. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. + + W m-1 K-1 + + + Thermal conductivity is the constant k in the formula q = -k grad T where q is the heat transfer per unit time per unit area of a surface normal to the direction of transfer and grad T is the temperature gradient. Thermal conductivity is a property of the material. 
+ + J m-2 @@ -31227,14 +31472,110 @@ - - integral_wrt_time_of_surface_downward_northward_stress + + biological_taxon_lsid + + + + temperature_in_ground + + + + surface_snow_density + + + + soot_content_of_surface_snow + + + + liquid_water_content_of_surface_snow + + + + surface_snow_thickness + + + + thermal_energy_content_of_surface_snow + + + + temperature_in_surface_snow integral_wrt_time_of_surface_downward_eastward_stress + + integral_wrt_time_of_surface_downward_northward_stress + + + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice + + + + atmosphere_upward_absolute_vorticity + + + + atmosphere_upward_relative_vorticity + + + + area_type + + + + area_type + + + + iron_growth_limitation_of_diazotrophic_phytoplankton + + + + growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance + + + + tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton + + + + mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water + + + + mass_fraction_of_liquid_precipitation_in_air + + + + mass_fraction_of_liquid_precipitation_in_air + + + + mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water + + + + air_pseudo_equivalent_potential_temperature + + + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water + + + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water + + + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water + + nitrogen_growth_limitation_of_diazotrophic_phytoplankton @@ -31263,42 +31604,6 @@ effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top - - mass_content_of_cloud_liquid_water_in_atmosphere_layer - - - - air_equivalent_potential_temperature - - - - number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - - - number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - - - wave_frequency - - - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - - - tendency_of_troposphere_moles_of_carbon_monoxide - - - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - - - tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - northward_heat_flux_in_air_due_to_eddy_advection @@ -31355,72 +31660,56 @@ atmosphere_mass_content_of_cloud_liquid_water - - mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - - - sea_water_velocity_to_direction - - - - sea_water_velocity_to_direction - - - - gross_primary_productivity_of_biomass_expressed_as_carbon - - - - eastward_water_vapor_flux_in_air + + mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - sea_water_velocity_from_direction + + mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - thickness_of_stratiform_snowfall_amount + + mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - lwe_thickness_of_stratiform_snowfall_amount + + 
mass_content_of_cloud_ice_in_atmosphere_layer - - equivalent_thickness_at_stp_of_atmosphere_ozone_content + + mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + mass_concentration_of_mercury_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + sea_water_velocity_to_direction - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + sea_water_velocity_to_direction - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + gross_primary_productivity_of_biomass_expressed_as_carbon - - atmosphere_net_upward_convective_mass_flux + + eastward_water_vapor_flux_in_air @@ -31435,94 +31724,6 @@ tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - thermal_energy_content_of_surface_snow - - - - liquid_water_content_of_surface_snow - - - - temperature_in_surface_snow - - - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - - - surface_snow_thickness - - - - surface_snow_density - - - - soot_content_of_surface_snow - - - - atmosphere_upward_absolute_vorticity - - - - atmosphere_upward_relative_vorticity - - - - area_type - - - - area_type - - - - iron_growth_limitation_of_diazotrophic_phytoplankton - - - - growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance - - - - tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - - - mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water - - - - mass_fraction_of_liquid_precipitation_in_air - - - - mass_fraction_of_liquid_precipitation_in_air - - - - mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water - - - - air_pseudo_equivalent_potential_temperature - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - tendency_of_atmosphere_mass_content_of_water_vapor @@ -31611,256 +31812,68 @@ atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - - - 
tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - - - moles_of_cfc11_per_unit_mass_in_sea_water - - - - atmosphere_moles_of_cfc11 - - - - tendency_of_atmosphere_moles_of_cfc113 - - - - atmosphere_moles_of_cfc113 - - - - tendency_of_atmosphere_moles_of_cfc114 - - - - atmosphere_moles_of_cfc114 - - - - tendency_of_atmosphere_moles_of_cfc115 - - - - atmosphere_moles_of_cfc115 - - - - tendency_of_atmosphere_moles_of_cfc12 - - - - atmosphere_moles_of_cfc12 - - - - tendency_of_atmosphere_moles_of_halon1202 - - - - atmosphere_moles_of_halon1202 - - - - tendency_of_atmosphere_moles_of_halon1211 - - - - atmosphere_moles_of_halon1211 - - - - tendency_of_atmosphere_moles_of_halon1301 - - - - atmosphere_moles_of_halon1301 - - - - tendency_of_atmosphere_moles_of_halon2402 - - - - atmosphere_moles_of_halon2402 - - - - tendency_of_atmosphere_moles_of_hcc140a - - - - effective_radius_of_convective_cloud_rain_particles - - - - tendency_of_troposphere_moles_of_hcc140a - - - - tendency_of_middle_atmosphere_moles_of_hcc140a - - - - tendency_of_troposphere_moles_of_hcfc22 - - - - tendency_of_atmosphere_moles_of_hcfc22 - - - - atmosphere_moles_of_hcfc22 - - - - tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition - - - - lagrangian_tendency_of_atmosphere_sigma_coordinate - - - - lagrangian_tendency_of_atmosphere_sigma_coordinate - - - - diameter_of_ambient_aerosol_particles - - - - effective_radius_of_stratiform_cloud_ice_particles - - - - effective_radius_of_convective_cloud_ice_particles - - - - effective_radius_of_stratiform_cloud_graupel_particles - - - - effective_radius_of_stratiform_cloud_rain_particles - - - - effective_radius_of_convective_cloud_snow_particles - - - - mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - - - mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - - - mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - - - mass_content_of_cloud_ice_in_atmosphere_layer - - - - mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_content_of_cloud_liquid_water_in_atmosphere_layer - - mass_concentration_of_mercury_dry_aerosol_particles_in_air + + air_equivalent_potential_temperature - - product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - stratiform_cloud_area_fraction + + wave_frequency - - mass_fraction_of_mercury_dry_aerosol_particles_in_air + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - atmosphere_moles_of_hcc140a + + tendency_of_troposphere_moles_of_carbon_monoxide - - floating_ice_shelf_area_fraction + + tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - atmosphere_moles_of_carbon_tetrachloride + + tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton + + 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - mole_fraction_of_inorganic_bromine_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - water_vapor_saturation_deficit_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - tendency_of_atmosphere_moles_of_carbon_tetrachloride + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - tendency_of_atmosphere_moles_of_carbon_monoxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition @@ -32015,104 +32028,152 @@ tendency_of_atmosphere_moles_of_cfc11 - - mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water + + moles_of_cfc11_per_unit_mass_in_sea_water - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization + + atmosphere_moles_of_cfc11 - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton + + tendency_of_atmosphere_moles_of_hcc140a - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton + + effective_radius_of_convective_cloud_rain_particles - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms + + tendency_of_troposphere_moles_of_hcc140a - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + tendency_of_middle_atmosphere_moles_of_hcc140a - - mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + tendency_of_troposphere_moles_of_hcfc22 - - tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes + + tendency_of_atmosphere_moles_of_hcfc22 - - tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + atmosphere_moles_of_hcfc22 - - tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction + + tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition - - volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - platform_name + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - platform_id + + diameter_of_ambient_aerosol_particles - - platform_pitch + + effective_radius_of_stratiform_cloud_ice_particles - - tendency_of_specific_humidity_due_to_stratiform_precipitation + + effective_radius_of_convective_cloud_ice_particles - - tendency_of_air_temperature_due_to_stratiform_precipitation + + effective_radius_of_stratiform_cloud_graupel_particles - - water_evaporation_amount_from_canopy + + 
effective_radius_of_stratiform_cloud_rain_particles - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition + + effective_radius_of_convective_cloud_snow_particles - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling + + product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission + + carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change - - atmosphere_mass_content_of_cloud_ice + + stratiform_cloud_area_fraction - - stratiform_precipitation_amount + + sea_water_velocity_from_direction - - tendency_of_atmosphere_moles_of_nitrous_oxide + + thickness_of_stratiform_snowfall_amount - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - atmosphere_mass_content_of_convective_cloud_condensed_water + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + + + lwe_thickness_of_stratiform_snowfall_amount + + + + equivalent_thickness_at_stp_of_atmosphere_ozone_content + + + + atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + + + atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + + + atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + + + atmosphere_net_upward_convective_mass_flux + + + + mass_fraction_of_mercury_dry_aerosol_particles_in_air + + + + atmosphere_moles_of_hcc140a + + + + floating_ice_shelf_area_fraction + + + + atmosphere_moles_of_carbon_tetrachloride @@ -32127,12 +32188,144 @@ mole_fraction_of_noy_expressed_as_nitrogen_in_air - - tendency_of_atmosphere_moles_of_methane + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton - - rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + mole_fraction_of_inorganic_bromine_in_air + + + + water_vapor_saturation_deficit_in_air + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + + + tendency_of_atmosphere_moles_of_carbon_tetrachloride + + + + tendency_of_atmosphere_moles_of_carbon_monoxide + + + + tendency_of_atmosphere_moles_of_cfc113 + + + + atmosphere_moles_of_cfc113 + + + + tendency_of_atmosphere_moles_of_cfc114 + + + + atmosphere_moles_of_cfc114 + + + + tendency_of_atmosphere_moles_of_cfc115 + + + + atmosphere_moles_of_cfc115 + + + + tendency_of_atmosphere_moles_of_cfc12 + + + + atmosphere_moles_of_cfc12 + + + + tendency_of_atmosphere_moles_of_halon1202 + + + + atmosphere_moles_of_halon1202 + + + + tendency_of_atmosphere_moles_of_halon1211 + + + + atmosphere_moles_of_halon1211 + + + + tendency_of_atmosphere_moles_of_halon1301 + + + + atmosphere_moles_of_halon1301 + + + + tendency_of_atmosphere_moles_of_halon2402 + + + + atmosphere_moles_of_halon2402 + + + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition + + + + mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton + + + + 
net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + + + mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + + + tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes + + + + tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + + + tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction + + + + volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles + + + + atmosphere_mass_content_of_convective_cloud_condensed_water @@ -32207,6 +32400,78 @@ lwe_thickness_of_stratiform_precipitation_amount + + tendency_of_atmosphere_moles_of_methane + + + + rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + + + magnitude_of_sea_ice_displacement + + + + surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + + + surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + + + surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water + + + + surface_downwelling_longwave_flux_in_air + + + + integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air + + + + integral_wrt_time_of_surface_downwelling_longwave_flux_in_air + + + + downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + + + downwelling_radiative_flux_per_unit_wavelength_in_air + + + + downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + + + downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + downwelling_radiance_per_unit_wavelength_in_air + + + + downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky @@ -32263,26 +32528,6 @@ surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - surface_water_evaporation_flux - - - - water_evapotranspiration_flux - - - - water_volume_transport_into_sea_water_from_rivers - - - - stratiform_graupel_flux - - - - toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - wood_debris_mass_content_of_carbon @@ -32311,18 +32556,6 @@ volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - platform_yaw - - - - platform_roll - - - - water_vapor_partial_pressure_in_air - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles @@ -32339,68 +32572,68 @@ integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - magnitude_of_sea_ice_displacement + + platform_yaw - - surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + platform_roll - - surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + water_vapor_partial_pressure_in_air - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + platform_name - - surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + platform_id - - surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water + + platform_pitch - - surface_downwelling_longwave_flux_in_air + + 
tendency_of_specific_humidity_due_to_stratiform_precipitation - - integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air + + tendency_of_air_temperature_due_to_stratiform_precipitation - - integral_wrt_time_of_surface_downwelling_longwave_flux_in_air + + water_evaporation_amount_from_canopy - - downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition - - downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling - - downwelling_radiative_flux_per_unit_wavelength_in_air + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + atmosphere_mass_content_of_cloud_ice - - downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + stratiform_precipitation_amount - - downwelling_radiance_per_unit_wavelength_in_air + + tendency_of_atmosphere_moles_of_nitrous_oxide - - downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition @@ -32507,6 +32740,26 @@ lwe_stratiform_precipitation_rate + + surface_water_evaporation_flux + + + + water_evapotranspiration_flux + + + + water_volume_transport_into_sea_water_from_rivers + + + + stratiform_graupel_flux + + + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection @@ -32535,6 +32788,94 @@ tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + integral_wrt_time_of_surface_net_downward_shortwave_flux + + + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + + + sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + + + ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + + + ocean_heat_y_transport_due_to_parameterized_eddy_advection + + + + ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + + + ocean_heat_x_transport_due_to_parameterized_eddy_advection + + + + northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + + + integral_wrt_time_of_toa_outgoing_longwave_flux + + + + integral_wrt_time_of_toa_net_downward_shortwave_flux + + + + integral_wrt_time_of_surface_net_downward_longwave_flux + + + + integral_wrt_time_of_surface_downward_sensible_heat_flux + + + + integral_wrt_time_of_surface_downward_latent_heat_flux + + + + integral_wrt_time_of_air_temperature_excess + + + + integral_wrt_time_of_air_temperature_deficit + + + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles @@ -32711,392 +33052,392 @@ surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid - - 
tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + air_pressure_at_mean_sea_level - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + sea_floor_depth_below_mean_sea_level - - tendency_of_sea_surface_height_above_mean_sea_level + + ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + sea_surface_wind_wave_mean_period - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + sea_surface_wave_mean_period - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + + sea_surface_swell_wave_mean_period - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles + + sea_surface_wind_wave_to_direction - - surface_geostrophic_eastward_sea_water_velocity + + sea_surface_swell_wave_to_direction - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + mass_content_of_water_in_soil_layer - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + mass_content_of_water_in_soil - - mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + sea_surface_wind_wave_significant_height - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + sea_surface_swell_wave_significant_height - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + atmosphere_mass_content_of_water_in_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + mass_fraction_of_nitrate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + mass_concentration_of_sulfate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + mass_fraction_of_water_in_ambient_aerosol_particles_in_air - - 
tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + mass_concentration_of_dust_dry_aerosol_particles_in_air - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - air_pressure_at_mean_sea_level + + sea_surface_wave_significant_height - - sea_floor_depth_below_mean_sea_level + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition - - sea_surface_wind_wave_mean_period + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition - - sea_surface_wave_mean_period + + number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air - - sea_surface_swell_wave_mean_period + + number_concentration_of_ambient_aerosol_particles_in_air - - sea_surface_wind_wave_to_direction + + mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_swell_wave_to_direction + + mass_fraction_of_dust_dry_aerosol_particles_in_air - - mass_content_of_water_in_soil_layer + + mass_concentration_of_water_in_ambient_aerosol_particles_in_air - - mass_content_of_water_in_soil + + mass_concentration_of_nitrate_dry_aerosol_particles_in_air - - sea_surface_wind_wave_significant_height + + mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air - - sea_surface_swell_wave_significant_height + + mass_concentration_of_ammonium_dry_aerosol_particles_in_air - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + 
atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_mass_content_of_dust_dry_aerosol_particles - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + surface_upward_mole_flux_of_carbon_dioxide - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + surface_downward_mole_flux_of_carbon_dioxide - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + atmosphere_mass_content_of_cloud_condensed_water - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + northward_water_vapor_flux_in_air - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + lwe_stratiform_snowfall_rate - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + stratiform_snowfall_amount - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + stratiform_rainfall_rate - - integral_wrt_time_of_toa_outgoing_longwave_flux + + stratiform_rainfall_flux - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + stratiform_rainfall_amount - - integral_wrt_time_of_surface_net_downward_longwave_flux + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - integral_wrt_time_of_surface_downward_latent_heat_flux + + tendency_of_sea_surface_height_above_mean_sea_level - - integral_wrt_time_of_air_temperature_excess + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - integral_wrt_time_of_air_temperature_deficit + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + mass_concentration_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + surface_geostrophic_eastward_sea_water_velocity - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air - - mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles - - atmosphere_mass_content_of_water_in_ambient_aerosol_particles + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion
+mass_fraction_of_pm1_ambient_aerosol_particles_in_air
-tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition
-tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition
-mass_fraction_of_nitrate_dry_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling
-mass_concentration_of_sulfate_dry_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling
-mass_fraction_of_water_in_ambient_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition
-mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition
-tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion
+mass_concentration_of_pm1_ambient_aerosol_particles_in_air
-tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution
+atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles
-mass_concentration_of_sulfate_ambient_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition
-mass_concentration_of_sulfate_ambient_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition
-mass_concentration_of_dust_dry_aerosol_particles_in_air
+tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition
-tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission
+tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition
-tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition
+tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition
-mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air
+mass_fraction_of_sea_salt_dry_aerosol_particles_in_air
-mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air
+mass_fraction_of_sea_salt_dry_aerosol_particles_in_air
-number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air
+mass_concentration_of_sea_salt_dry_aerosol_particles_in_air
-mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air
+mass_concentration_of_sea_salt_dry_aerosol_particles_in_air
-atmosphere_mass_content_of_ammonium_dry_aerosol_particles
+atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles
-stratiform_rainfall_rate
+atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles
-stratiform_rainfall_flux
+atmosphere_mass_content_of_sea_salt_dry_aerosol_particles
-stratiform_rainfall_amount
+atmosphere_mass_content_of_sea_salt_dry_aerosol_particles
@@ -33139,22 +33480,6 @@ upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves
-mass_fraction_of_ozone_in_air
-mass_fraction_of_convective_cloud_condensed_water_in_air
-sea_surface_swell_wave_period
-surface_drag_coefficient_in_air
 specific_gravitational_potential_energy
@@ -33175,6 +33500,14 @@ isotropic_longwave_radiance_in_air
+mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air
+atmosphere_mass_content_of_ammonium_dry_aerosol_particles
 stratiform_snowfall_flux
@@ -33183,108 +33516,120 @@ thickness_of_stratiform_rainfall_amount
-sea_surface_wave_significant_height
+sea_surface_wind_wave_period
-tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles
+omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water
-tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition
+tendency_of_middle_atmosphere_moles_of_molecular_hydrogen
-tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition
+tendency_of_middle_atmosphere_moles_of_methyl_chloride
-number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air
+tendency_of_middle_atmosphere_moles_of_methane
-number_concentration_of_ambient_aerosol_particles_in_air
+sea_water_y_velocity
-mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air
+sea_water_x_velocity
-mass_fraction_of_dust_dry_aerosol_particles_in_air
+mole_fraction_of_hypochlorous_acid_in_air
-mass_concentration_of_water_in_ambient_aerosol_particles_in_air
+tendency_of_troposphere_moles_of_molecular_hydrogen
-mass_concentration_of_nitrate_dry_aerosol_particles_in_air
+tendency_of_troposphere_moles_of_methyl_chloride
-mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air
+mass_content_of_water_vapor_in_atmosphere_layer
-mass_concentration_of_ammonium_dry_aerosol_particles_in_air
+mass_content_of_water_in_atmosphere_layer
-atmosphere_mass_content_of_sulfate_ambient_aerosol_particles
+tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence
-atmosphere_mass_content_of_sulfate_ambient_aerosol_particles
+tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection
-atmosphere_mass_content_of_dust_dry_aerosol_particles
+tendency_of_troposphere_moles_of_methyl_bromide
-atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles
+tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection
-atmosphere_mass_content_of_sulfate_dry_aerosol_particles
+tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection
-tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence
+radiation_wavelength
-surface_upward_mole_flux_of_carbon_dioxide
+tendency_of_troposphere_moles_of_methane
-surface_downward_mole_flux_of_carbon_dioxide
+tendency_of_atmosphere_mass_content_of_water_due_to_advection
-atmosphere_mass_content_of_cloud_condensed_water
+mole_fraction_of_chlorine_monoxide_in_air
-northward_water_vapor_flux_in_air
+mole_fraction_of_chlorine_dioxide_in_air
-lwe_stratiform_snowfall_rate
+mass_fraction_of_ozone_in_air
-stratiform_snowfall_amount
+mass_fraction_of_convective_cloud_condensed_water_in_air
-sea_surface_wind_wave_period
+sea_surface_swell_wave_period
-omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water
+surface_drag_coefficient_in_air
+mass_content_of_cloud_condensed_water_in_atmosphere_layer
+mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water
+mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water
@@ -33347,98 +33692,6 @@ tendency_of_atmosphere_moles_of_methyl_chloride
-tendency_of_middle_atmosphere_moles_of_molecular_hydrogen
-tendency_of_middle_atmosphere_moles_of_methyl_chloride
-tendency_of_middle_atmosphere_moles_of_methane
-sea_water_y_velocity
-sea_water_x_velocity
-mole_fraction_of_hypochlorous_acid_in_air
-tendency_of_troposphere_moles_of_molecular_hydrogen
-tendency_of_troposphere_moles_of_methyl_chloride
-mass_content_of_water_vapor_in_atmosphere_layer
-mass_content_of_water_in_atmosphere_layer
-tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence
-tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection
-tendency_of_troposphere_moles_of_methyl_bromide
-tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection
-tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection
-radiation_wavelength
-tendency_of_troposphere_moles_of_methane
-tendency_of_atmosphere_mass_content_of_water_due_to_advection
-mole_fraction_of_chlorine_monoxide_in_air
-mole_fraction_of_chlorine_dioxide_in_air
-mass_content_of_cloud_condensed_water_in_atmosphere_layer
-mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water
-mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water
 surface_drag_coefficient_for_momentum_in_air
diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index c29772aaac7..3e847acad7d 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -36,14 +36,18 @@ All the load functions share very similar arguments:
 
 * uris:
-    Either a single filename/URI expressed as a string, or an
-    iterable of filenames/URIs.
+    Either a single filename/URI expressed as a string or
+    :class:`pathlib.PurePath`, or an iterable of filenames/URIs.
 
     Filenames can contain `~` or `~user` abbreviations, and/or
     Unix shell-style wildcards (e.g. `*` and `?`). See the
     standard library function :func:`os.path.expanduser` and
     module :mod:`fnmatch` for more details.
 
+    .. warning::
+
+        If supplying a URL, only OPeNDAP Data Sources are supported.
+
 * constraints:
     Either a single constraint, or an iterable of constraints.
     Each constraint can be either a string, an instance of
@@ -89,6 +93,7 @@ def callback(cube, field, filename):
 import glob
 import itertools
 import os.path
+import pathlib
 import threading
 
 import iris._constraints
@@ -103,7 +108,7 @@ def callback(cube, field, filename):
 
 # Iris revision.
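The `uris` docstring changes above extend every load function to accept :class:`pathlib.PurePath` objects as well as strings. A minimal usage sketch (the filename is hypothetical, and remote URLs remain limited to OPeNDAP Data Sources):

    from pathlib import Path

    import iris

    # A Path now works anywhere a filename string did.
    cube = iris.load_cube(Path("air_temperature.nc"))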
-__version__ = "3.2.dev0" +__version__ = "3.3.dev0" # Restrict the names imported when using "from iris import *" __all__ = [ @@ -241,7 +246,7 @@ def context(self, **kwargs): def _generate_cubes(uris, callback, constraints): """Returns a generator of cubes given the URIs and a callback.""" - if isinstance(uris, str): + if isinstance(uris, (str, pathlib.PurePath)): uris = [uris] # Group collections of uris by their iris handler @@ -285,7 +290,8 @@ def load(uris, constraints=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -313,7 +319,8 @@ def load_cube(uris, constraint=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -352,7 +359,8 @@ def load_cubes(uris, constraints=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -397,7 +405,8 @@ def load_raw(uris, constraints=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 6758e9f55d1..bc12080523a 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -1809,7 +1809,8 @@ def key_func(coord): # Order the coordinates by hints, axis, and definition. for coord in sorted(coords, key=key_func): - if not cube.coord_dims(coord) and coord.shape == (1,): + dims = tuple(cube.coord_dims(coord)) + if not dims and coord.shape == (1,): # Extract the scalar coordinate data and metadata. scalar_defns.append(coord.metadata) # Because we know there's a single Cell in the @@ -1834,11 +1835,11 @@ def key_func(coord): # Extract the vector coordinate and metadata. if id(coord) in cube_aux_coord_ids: vector_aux_coords_and_dims.append( - _CoordAndDims(coord, tuple(cube.coord_dims(coord))) + _CoordAndDims(coord, dims) ) else: vector_dim_coords_and_dims.append( - _CoordAndDims(coord, tuple(cube.coord_dims(coord))) + _CoordAndDims(coord, dims) ) factory_defns = [] diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 68f86832f50..1e78a92fd14 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -207,7 +207,7 @@ def __init__(self, title, attributes): self.values = [] self.contents = [] for name, value in sorted(attributes.items()): - value = value_repr(value) + value = value_repr(value, quote_strings=True) value = iris.util.clip_string(value) self.names.append(name) self.values.append(value) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 01c12c748a7..b1a9e1d2598 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1394,7 +1394,7 @@ def _lazy_rms(array, axis, **kwargs): # all. Thus trying to use this aggregator with weights will currently # raise an error in dask due to the unexpected keyword `weights`, # rather than silently returning the wrong answer. 
- return da.sqrt(da.mean(array ** 2, axis=axis, **kwargs)) + return da.sqrt(da.mean(array**2, axis=axis, **kwargs)) @_build_dask_mdtol_function @@ -2691,14 +2691,6 @@ class UnstructuredNearest: .. Note:: Currently only supports regridding, not interpolation. - .. Note:: - This scheme performs essentially the same job as - :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. - That scheme is faster, but only works well on data in a limited - region of the globe, covered by a specified projection. - This approach is more rigorously correct and can be applied to global - datasets. - """ # Note: the argument requirements are simply those of the underlying @@ -2769,6 +2761,9 @@ class PointInCell: This class describes the point-in-cell regridding scheme for use typically with :meth:`iris.cube.Cube.regrid()`. + Each result datapoint is an average over all source points that fall inside + that (bounded) target cell. + The PointInCell regridder can regrid data from a source grid of any dimensionality and in any coordinate system. The location of each source point is specified by X and Y coordinates @@ -2786,8 +2781,12 @@ class PointInCell: def __init__(self, weights=None): """ - Point-in-cell regridding scheme suitable for regridding over one - or more orthogonal coordinates. + Point-in-cell regridding scheme suitable for regridding from a source + cube with X and Y coordinates all on the same dimensions, to a target + cube with bounded X and Y coordinates on separate X and Y dimensions. + + Each result datapoint is an average over all source points that fall + inside that (bounded) target cell. Optional Args: diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index ae162f6c538..8381185e58e 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -3,8 +3,18 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. +import functools +import cf_units +import numpy as np +import numpy.ma as ma + +from iris._lazy_data import map_complete_blocks from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid +from iris.analysis._regrid import RectilinearRegridder +import iris.analysis.cartography +import iris.coord_systems +from iris.util import _meshgrid class AreaWeightedRegridder: @@ -43,10 +53,6 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): the same coordinate system. """ - from iris.experimental.regrid import ( - _regrid_area_weighted_rectilinear_src_and_grid__prepare, - ) - # Snapshot the state of the source cube to ensure that the regridder is # impervious to external changes to the original cubes. self._src_grid = snapshot_grid(src_grid_cube) @@ -103,10 +109,6 @@ def __call__(self, cube): in the horizontal dimensions will be combined before regridding. """ - from iris.experimental.regrid import ( - _regrid_area_weighted_rectilinear_src_and_grid__perform, - ) - src_x, src_y = get_xy_dim_coords(cube) if (src_x, src_y) != self._src_grid: raise ValueError( @@ -130,3 +132,1007 @@ def __call__(self, cube): return _regrid_area_weighted_rectilinear_src_and_grid__perform( cube, _regrid_info, mdtol=self._mdtol ) + + +# +# Support routines, all originally in iris.experimental.regrid +# + + +def _get_xy_coords(cube): + """ + Return the x and y coordinates from a cube. 
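Since the area-weighted support code is being moved into this module wholesale, a small self-contained regridding sketch may help orient the reader; the grid sizes and random data are arbitrary:

    import numpy as np

    import iris
    from iris.coord_systems import GeogCS
    from iris.coords import DimCoord
    from iris.cube import Cube


    def bounded_grid_cube(nx, ny):
        # A global rectilinear grid with contiguous bounds, as the
        # area-weighted scheme requires.
        cs = GeogCS(6371229.0)
        lon = DimCoord(
            (np.arange(nx) + 0.5) * (360.0 / nx),
            standard_name="longitude", units="degrees", coord_system=cs,
        )
        lat = DimCoord(
            (np.arange(ny) + 0.5) * (180.0 / ny) - 90.0,
            standard_name="latitude", units="degrees", coord_system=cs,
        )
        lon.guess_bounds()
        lat.guess_bounds()
        cube = Cube(np.random.default_rng(0).random((ny, nx)))
        cube.add_dim_coord(lat, 0)
        cube.add_dim_coord(lon, 1)
        return cube


    src = bounded_grid_cube(24, 12)
    tgt = bounded_grid_cube(12, 6)
    result = src.regrid(tgt, iris.analysis.AreaWeighted(mdtol=0.5))
    print(result.shape)  # (6, 12)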
+ + This function will preferentially return a pair of dimension + coordinates (if there are more than one potential x or y dimension + coordinates a ValueError will be raised). If the cube does not have + a pair of x and y dimension coordinates it will return 1D auxiliary + coordinates (including scalars). If there is not one and only one set + of x and y auxiliary coordinates a ValueError will be raised. + + Having identified the x and y coordinates, the function checks that they + have equal coordinate systems and that they do not occupy the same + dimension on the cube. + + Args: + + * cube: + An instance of :class:`iris.cube.Cube`. + + Returns: + A tuple containing the cube's x and y coordinates. + + """ + # Look for a suitable dimension coords first. + x_coords = cube.coords(axis="x", dim_coords=True) + if not x_coords: + # If there is no x coord in dim_coords look for scalars or + # monotonic coords in aux_coords. + x_coords = [ + coord + for coord in cube.coords(axis="x", dim_coords=False) + if coord.ndim == 1 and coord.is_monotonic() + ] + if len(x_coords) != 1: + raise ValueError( + "Cube {!r} must contain a single 1D x " + "coordinate.".format(cube.name()) + ) + x_coord = x_coords[0] + + # Look for a suitable dimension coords first. + y_coords = cube.coords(axis="y", dim_coords=True) + if not y_coords: + # If there is no y coord in dim_coords look for scalars or + # monotonic coords in aux_coords. + y_coords = [ + coord + for coord in cube.coords(axis="y", dim_coords=False) + if coord.ndim == 1 and coord.is_monotonic() + ] + if len(y_coords) != 1: + raise ValueError( + "Cube {!r} must contain a single 1D y " + "coordinate.".format(cube.name()) + ) + y_coord = y_coords[0] + + if x_coord.coord_system != y_coord.coord_system: + raise ValueError( + "The cube's x ({!r}) and y ({!r}) " + "coordinates must have the same coordinate " + "system.".format(x_coord.name(), y_coord.name()) + ) + + # The x and y coordinates must describe different dimensions + # or be scalar coords. + x_dims = cube.coord_dims(x_coord) + x_dim = None + if x_dims: + x_dim = x_dims[0] + + y_dims = cube.coord_dims(y_coord) + y_dim = None + if y_dims: + y_dim = y_dims[0] + + if x_dim is not None and y_dim == x_dim: + raise ValueError( + "The cube's x and y coords must not describe the " + "same data dimension." + ) + + return x_coord, y_coord + + +def _within_bounds(src_bounds, tgt_bounds, orderswap=False): + """ + Determine which target bounds lie within the extremes of the source bounds. + + Args: + + * src_bounds (ndarray): + An (n, 2) shaped array of monotonic contiguous source bounds. + * tgt_bounds (ndarray): + An (n, 2) shaped array corresponding to the target bounds. + + Kwargs: + + * orderswap (bool): + A Boolean indicating whether the target bounds are in descending order + (True). Defaults to False. + + Returns: + Boolean ndarray, indicating whether each target bound is within the + extremes of the source bounds. + + """ + min_bound = np.min(src_bounds) - 1e-14 + max_bound = np.max(src_bounds) + 1e-14 + + # Swap upper-lower is necessary. + if orderswap is True: + upper, lower = tgt_bounds.T + else: + lower, upper = tgt_bounds.T + + return ((lower <= max_bound) * (lower >= min_bound)) * ( + (upper <= max_bound) * (upper >= min_bound) + ) + + +def _cropped_bounds(bounds, lower, upper): + """ + Return a new bounds array and corresponding slice object (or indices) of + the original data array, resulting from cropping the provided bounds + between the specified lower and upper values. 
The bounds at the + extremities will be truncated so that they start and end with lower and + upper. + + This function will return an empty NumPy array and slice if there is no + overlap between the region covered by bounds and the region from lower to + upper. + + If lower > upper the resulting bounds may not be contiguous and the + indices object will be a tuple of indices rather than a slice object. + + Args: + + * bounds: + An (n, 2) shaped array of monotonic contiguous bounds. + * lower: + Lower bound at which to crop the bounds array. + * upper: + Upper bound at which to crop the bounds array. + + Returns: + A tuple of the new bounds array and the corresponding slice object or + indices from the zeroth axis of the original array. + + """ + reversed_flag = False + # Ensure order is increasing. + if bounds[0, 0] > bounds[-1, 0]: + # Reverse bounds + bounds = bounds[::-1, ::-1] + reversed_flag = True + + # Number of bounds. + n = bounds.shape[0] + + if lower <= upper: + if lower > bounds[-1, 1] or upper < bounds[0, 0]: + new_bounds = bounds[0:0] + indices = slice(0, 0) + else: + # A single region lower->upper. + if lower < bounds[0, 0]: + # Region extends below bounds so use first lower bound. + lindex = 0 + lower = bounds[0, 0] + else: + # Index of last lower bound less than or equal to lower. + lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] + if upper > bounds[-1, 1]: + # Region extends above bounds so use last upper bound. + uindex = n - 1 + upper = bounds[-1, 1] + else: + # Index of first upper bound greater than or equal to + # upper. + uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] + # Extract the bounds in our region defined by lower->upper. + new_bounds = np.copy(bounds[lindex : (uindex + 1), :]) + # Replace first and last values with specified bounds. + new_bounds[0, 0] = lower + new_bounds[-1, 1] = upper + if reversed_flag: + indices = slice(n - (uindex + 1), n - lindex) + else: + indices = slice(lindex, uindex + 1) + else: + # Two regions [0]->upper, lower->[-1] + # [0]->upper + if upper < bounds[0, 0]: + # Region outside src bounds. + new_bounds_left = bounds[0:0] + indices_left = tuple() + slice_left = slice(0, 0) + else: + if upper > bounds[-1, 1]: + # Whole of bounds. + uindex = n - 1 + upper = bounds[-1, 1] + else: + # Index of first upper bound greater than or equal to upper. + uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] + # Extract the bounds in our region defined by [0]->upper. + new_bounds_left = np.copy(bounds[0 : (uindex + 1), :]) + # Replace last value with specified bound. + new_bounds_left[-1, 1] = upper + if reversed_flag: + indices_left = tuple(range(n - (uindex + 1), n)) + slice_left = slice(n - (uindex + 1), n) + else: + indices_left = tuple(range(0, uindex + 1)) + slice_left = slice(0, uindex + 1) + # lower->[-1] + if lower > bounds[-1, 1]: + # Region is outside src bounds. + new_bounds_right = bounds[0:0] + indices_right = tuple() + slice_right = slice(0, 0) + else: + if lower < bounds[0, 0]: + # Whole of bounds. + lindex = 0 + lower = bounds[0, 0] + else: + # Index of last lower bound less than or equal to lower. + lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] + # Extract the bounds in our region defined by lower->[-1]. + new_bounds_right = np.copy(bounds[lindex:, :]) + # Replace first value with specified bound. 
+ new_bounds_right[0, 0] = lower + if reversed_flag: + indices_right = tuple(range(0, n - lindex)) + slice_right = slice(0, n - lindex) + else: + indices_right = tuple(range(lindex, n)) + slice_right = slice(lindex, None) + + if reversed_flag: + # Flip everything around. + indices_left, indices_right = indices_right, indices_left + slice_left, slice_right = slice_right, slice_left + + # Combine regions. + new_bounds = np.concatenate((new_bounds_left, new_bounds_right)) + # Use slices if possible, but if we have two regions use indices. + if indices_left and indices_right: + indices = indices_left + indices_right + elif indices_left: + indices = slice_left + elif indices_right: + indices = slice_right + else: + indices = slice(0, 0) + + if reversed_flag: + new_bounds = new_bounds[::-1, ::-1] + + return new_bounds, indices + + +def _cartesian_area(y_bounds, x_bounds): + """ + Return an array of the areas of each cell given two arrays + of cartesian bounds. + + Args: + + * y_bounds: + An (n, 2) shaped NumPy array. + * x_bounds: + An (m, 2) shaped NumPy array. + + Returns: + An (n, m) shaped Numpy array of areas. + + """ + heights = y_bounds[:, 1] - y_bounds[:, 0] + widths = x_bounds[:, 1] - x_bounds[:, 0] + return np.abs(np.outer(heights, widths)) + + +def _spherical_area(y_bounds, x_bounds, radius=1.0): + """ + Return an array of the areas of each cell on a sphere + given two arrays of latitude and longitude bounds in radians. + + Args: + + * y_bounds: + An (n, 2) shaped NumPy array of latitide bounds in radians. + * x_bounds: + An (m, 2) shaped NumPy array of longitude bounds in radians. + * radius: + Radius of the sphere. Default is 1.0. + + Returns: + An (n, m) shaped Numpy array of areas. + + """ + return iris.analysis.cartography._quadrant_area(y_bounds, x_bounds, radius) + + +def _get_bounds_in_units(coord, units, dtype): + """Return a copy of coord's bounds in the specified units and dtype.""" + # The bounds are cast to dtype before conversion to prevent issues when + # mixing float32 and float64 types. + return coord.units.convert(coord.bounds.astype(dtype), units).astype(dtype) + + +def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0): + """ + Return the weighted mean of an array over the specified axis + using the provided weights (if any) and a permitted fraction of + masked data. + + Args: + + * data (array-like): + Data to be averaged. + + * weights (array-like): + An array of the same shape as the data that specifies the contribution + of each corresponding data element to the calculated mean. + + Kwargs: + + * axis (int or tuple of ints): + Axis along which the mean is computed. The default is to compute + the mean of the flattened array. + + * mdtol (float): + Tolerance of missing data. The value returned in each element of the + returned array will be masked if the fraction of masked data exceeds + mdtol. This fraction is weighted by the `weights` array if one is + provided. mdtol=0 means no missing data is tolerated + while mdtol=1 will mean the resulting element will be masked if and + only if all the contributing elements of data are masked. + Defaults to 0. + + Returns: + Numpy array (possibly masked) or scalar. 
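The cropping helpers above are easiest to follow with tiny inputs. A sketch, assuming `_within_bounds` and `_cropped_bounds` as defined in this module (the bounds values are made up):

    import numpy as np

    src_bounds = np.array([[0.0, 1.0], [1.0, 2.0], [2.0, 3.0]])
    tgt_bounds = np.array([[0.5, 1.5], [2.5, 3.5]])
    # The first target cell lies inside [0, 3]; the second overruns it.
    print(_within_bounds(src_bounds, tgt_bounds))  # [ True False]

    # Cropping to [0.5, 2.5] keeps all three cells, truncating the outer two.
    new_bounds, indices = _cropped_bounds(src_bounds, 0.5, 2.5)
    print(new_bounds)  # [[0.5 1. ] [1.  2. ] [2.  2.5]]
    print(indices)     # slice(0, 3, None)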
+ + """ + if ma.is_masked(data): + res, unmasked_weights_sum = ma.average( + data, weights=weights, axis=axis, returned=True + ) + if mdtol < 1: + weights_sum = weights.sum(axis=axis) + frac_masked = 1 - np.true_divide(unmasked_weights_sum, weights_sum) + mask_pt = frac_masked > mdtol + if np.any(mask_pt) and not isinstance(res, ma.core.MaskedConstant): + if np.isscalar(res): + res = ma.masked + elif ma.isMaskedArray(res): + res.mask |= mask_pt + else: + res = ma.masked_array(res, mask=mask_pt) + else: + res = np.average(data, weights=weights, axis=axis) + return res + + +def _regrid_area_weighted_array( + src_data, x_dim, y_dim, weights_info, index_info, mdtol=0 +): + """ + Regrid the given data from its source grid to a new grid using + an area weighted mean to determine the resulting data values. + + .. note:: + + Elements in the returned array that lie either partially + or entirely outside of the extent of the source grid will + be masked irrespective of the value of mdtol. + + Args: + + * src_data: + An N-dimensional NumPy array. + * x_dim: + The X dimension within `src_data`. + * y_dim: + The Y dimension within `src_data`. + * weights_info: + The area weights information to be used for area-weighted + regridding. + + Kwargs: + + * mdtol: + Tolerance of missing data. The value returned in each element of the + returned array will be masked if the fraction of missing data exceeds + mdtol. This fraction is calculated based on the area of masked cells + within each target cell. mdtol=0 means no missing data is tolerated + while mdtol=1 will mean the resulting element will be masked if and + only if all the overlapping elements of the source grid are masked. + Defaults to 0. + + Returns: + The regridded data as an N-dimensional NumPy array. The lengths + of the X and Y dimensions will now match those of the target + grid. + + """ + ( + blank_weights, + src_area_weights, + new_data_mask_basis, + ) = weights_info + + ( + result_x_extent, + result_y_extent, + square_data_indices_y, + square_data_indices_x, + src_area_datas_required, + ) = index_info + + # Ensure we have x_dim and y_dim. + x_dim_orig = x_dim + y_dim_orig = y_dim + if y_dim is None: + src_data = np.expand_dims(src_data, axis=src_data.ndim) + y_dim = src_data.ndim - 1 + if x_dim is None: + src_data = np.expand_dims(src_data, axis=src_data.ndim) + x_dim = src_data.ndim - 1 + # Move y_dim and x_dim to last dimensions + if not x_dim == src_data.ndim - 1: + src_data = np.moveaxis(src_data, x_dim, -1) + if not y_dim == src_data.ndim - 2: + if x_dim < y_dim: + # note: y_dim was shifted along by one position when + # x_dim was moved to the last dimension + src_data = np.moveaxis(src_data, y_dim - 1, -2) + elif x_dim > y_dim: + src_data = np.moveaxis(src_data, y_dim, -2) + x_dim = src_data.ndim - 1 + y_dim = src_data.ndim - 2 + + # Create empty "pre-averaging" data array that will enable the + # src_data data coresponding to a given target grid point, + # to be stacked per point. + # Note that dtype is not preserved and that the array mask + # allows for regions that do not overlap. + new_shape = list(src_data.shape) + new_shape[x_dim] = result_x_extent + new_shape[y_dim] = result_y_extent + + # Use input cube dtype or convert values to the smallest possible float + # dtype when necessary. + dtype = np.promote_types(src_data.dtype, np.float16) + + # Axes of data over which the weighted mean is calculated. 
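The mdtol behaviour documented above pivots on the masked fraction of each mean: the result is masked only when that fraction exceeds mdtol. A sketch, assuming `_weighted_mean_with_mdtol` as defined above:

    import numpy as np
    import numpy.ma as ma

    # One point in three is masked, so the masked fraction is 1/3.
    data = ma.masked_array([1.0, 2.0, 3.0], mask=[False, False, True])
    weights = np.ones(3)
    print(_weighted_mean_with_mdtol(data, weights, mdtol=0.5))  # 1.5
    print(_weighted_mean_with_mdtol(data, weights, mdtol=0.2))  # -- (masked)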
+ axis = (y_dim, x_dim) + + # Use previously established indices + + src_area_datas_square = src_data[ + ..., square_data_indices_y, square_data_indices_x + ] + + _, src_area_datas_required = np.broadcast_arrays( + src_area_datas_square, src_area_datas_required + ) + + src_area_datas = np.where( + src_area_datas_required, src_area_datas_square, 0 + ) + + # Flag to indicate whether the original data was a masked array. + src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False + if src_masked: + src_area_masks_square = src_data.mask[ + ..., square_data_indices_y, square_data_indices_x + ] + src_area_masks = np.where( + src_area_datas_required, src_area_masks_square, True + ) + + else: + # If the weights were originally blank, set the weights to all 1 to + # avoid divide by 0 error and set the new data mask for making the + # values 0 + src_area_weights = np.where(blank_weights, 1, src_area_weights) + + new_data_mask = np.broadcast_to(new_data_mask_basis, new_shape) + + # Broadcast the weights array to allow numpy's ma.average + # to be called. + # Assign new shape to raise error on copy. + src_area_weights.shape = src_area_datas.shape[-3:] + # Broadcast weights to match shape of data. + _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights) + + # Mask the data points + if src_masked: + src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks) + + # Calculate weighted mean taking into account missing data. + new_data = _weighted_mean_with_mdtol( + src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol + ) + new_data = new_data.reshape(new_shape) + if src_masked: + new_data_mask = new_data.mask + + # Mask the data if originally masked or if the result has masked points + if ma.isMaskedArray(src_data): + new_data = ma.array( + new_data, + mask=new_data_mask, + fill_value=src_data.fill_value, + dtype=dtype, + ) + elif new_data_mask.any(): + new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype) + else: + new_data = new_data.astype(dtype) + + # Restore data to original form + if x_dim_orig is None and y_dim_orig is None: + new_data = np.squeeze(new_data, axis=x_dim) + new_data = np.squeeze(new_data, axis=y_dim) + elif y_dim_orig is None: + new_data = np.squeeze(new_data, axis=y_dim) + new_data = np.moveaxis(new_data, -1, x_dim_orig) + elif x_dim_orig is None: + new_data = np.squeeze(new_data, axis=x_dim) + new_data = np.moveaxis(new_data, -1, y_dim_orig) + elif x_dim_orig < y_dim_orig: + # move the x_dim back first, so that the y_dim will + # then be moved to its original position + new_data = np.moveaxis(new_data, -1, x_dim_orig) + new_data = np.moveaxis(new_data, -1, y_dim_orig) + else: + # move the y_dim back first, so that the x_dim will + # then be moved to its original position + new_data = np.moveaxis(new_data, -2, y_dim_orig) + new_data = np.moveaxis(new_data, -1, x_dim_orig) + + return new_data + + +def _regrid_area_weighted_rectilinear_src_and_grid__prepare( + src_cube, grid_cube +): + """ + First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. + + Check inputs and calculate related info. The 'regrid info' returned + can be re-used over many 2d slices. + + """ + # Get the 1d monotonic (or scalar) src and grid coordinates. + src_x, src_y = _get_xy_coords(src_cube) + grid_x, grid_y = _get_xy_coords(grid_cube) + + # Condition 1: All x and y coordinates must have contiguous bounds to + # define areas. 
+ if ( + not src_x.is_contiguous() + or not src_y.is_contiguous() + or not grid_x.is_contiguous() + or not grid_y.is_contiguous() + ): + raise ValueError( + "The horizontal grid coordinates of both the source " + "and grid cubes must have contiguous bounds." + ) + + # Condition 2: Everything must have the same coordinate system. + src_cs = src_x.coord_system + grid_cs = grid_x.coord_system + if src_cs != grid_cs: + raise ValueError( + "The horizontal grid coordinates of both the source " + "and grid cubes must have the same coordinate " + "system." + ) + + # Condition 3: cannot create vector coords from scalars. + src_x_dims = src_cube.coord_dims(src_x) + src_x_dim = None + if src_x_dims: + src_x_dim = src_x_dims[0] + src_y_dims = src_cube.coord_dims(src_y) + src_y_dim = None + if src_y_dims: + src_y_dim = src_y_dims[0] + if ( + src_x_dim is None + and grid_x.shape[0] != 1 + or src_y_dim is None + and grid_y.shape[0] != 1 + ): + raise ValueError( + "The horizontal grid coordinates of source cube " + "includes scalar coordinates, but the new grid does " + "not. The new grid must not require additional data " + "dimensions to be created." + ) + + # Determine whether to calculate flat or spherical areas. + # Don't only rely on coord system as it may be None. + spherical = ( + isinstance( + src_cs, + (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS), + ) + or src_x.units == "degrees" + or src_x.units == "radians" + ) + + # Get src and grid bounds in the same units. + x_units = cf_units.Unit("radians") if spherical else src_x.units + y_units = cf_units.Unit("radians") if spherical else src_y.units + + # Operate in highest precision. + src_dtype = np.promote_types(src_x.bounds.dtype, src_y.bounds.dtype) + grid_dtype = np.promote_types(grid_x.bounds.dtype, grid_y.bounds.dtype) + dtype = np.promote_types(src_dtype, grid_dtype) + + src_x_bounds = _get_bounds_in_units(src_x, x_units, dtype) + src_y_bounds = _get_bounds_in_units(src_y, y_units, dtype) + grid_x_bounds = _get_bounds_in_units(grid_x, x_units, dtype) + grid_y_bounds = _get_bounds_in_units(grid_y, y_units, dtype) + + # Create 2d meshgrids as required by _create_cube func. + meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points) + + # Determine whether target grid bounds are decreasing. This must + # be determined prior to wrap_lons being called. + grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0] + grid_y_decreasing = grid_y_bounds[-1, 0] < grid_y_bounds[0, 0] + + # Wrapping of longitudes. + if spherical: + base = np.min(src_x_bounds) + modulus = x_units.modulus + # Only wrap if necessary to avoid introducing floating + # point errors. + if np.min(grid_x_bounds) < base or np.max(grid_x_bounds) > ( + base + modulus + ): + grid_x_bounds = iris.analysis.cartography.wrap_lons( + grid_x_bounds, base, modulus + ) + + # Determine whether the src_x coord has periodic boundary conditions. + circular = getattr(src_x, "circular", False) + + # Use simple cartesian area function or one that takes into + # account the curved surface if coord system is spherical. + if spherical: + area_func = _spherical_area + else: + area_func = _cartesian_area + + def _calculate_regrid_area_weighted_weights( + src_x_bounds, + src_y_bounds, + grid_x_bounds, + grid_y_bounds, + grid_x_decreasing, + grid_y_decreasing, + area_func, + circular=False, + ): + """ + Compute the area weights used for area-weighted regridding. + Args: + * src_x_bounds: + A NumPy array of bounds along the X axis defining the source grid. 
+ * src_y_bounds: + A NumPy array of bounds along the Y axis defining the source grid. + * grid_x_bounds: + A NumPy array of bounds along the X axis defining the new grid. + * grid_y_bounds: + A NumPy array of bounds along the Y axis defining the new grid. + * grid_x_decreasing: + Boolean indicating whether the X coordinate of the new grid is + in descending order. + * grid_y_decreasing: + Boolean indicating whether the Y coordinate of the new grid is + in descending order. + * area_func: + A function that returns an (p, q) array of weights given an (p, 2) + shaped array of Y bounds and an (q, 2) shaped array of X bounds. + Kwargs: + * circular: + A boolean indicating whether the `src_x_bounds` are periodic. + Default is False. + Returns: + The area weights to be used for area-weighted regridding. + """ + # Determine which grid bounds are within src extent. + y_within_bounds = _within_bounds( + src_y_bounds, grid_y_bounds, grid_y_decreasing + ) + x_within_bounds = _within_bounds( + src_x_bounds, grid_x_bounds, grid_x_decreasing + ) + + # Cache which src_bounds are within grid bounds + cached_x_bounds = [] + cached_x_indices = [] + max_x_indices = 0 + for (x_0, x_1) in grid_x_bounds: + if grid_x_decreasing: + x_0, x_1 = x_1, x_0 + x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) + cached_x_bounds.append(x_bounds) + cached_x_indices.append(x_indices) + # Keep record of the largest slice + if isinstance(x_indices, slice): + x_indices_size = np.sum(x_indices.stop - x_indices.start) + else: # is tuple of indices + x_indices_size = len(x_indices) + if x_indices_size > max_x_indices: + max_x_indices = x_indices_size + + # Cache which y src_bounds areas and weights are within grid bounds + cached_y_indices = [] + cached_weights = [] + max_y_indices = 0 + for j, (y_0, y_1) in enumerate(grid_y_bounds): + # Reverse lower and upper if dest grid is decreasing. + if grid_y_decreasing: + y_0, y_1 = y_1, y_0 + y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1) + cached_y_indices.append(y_indices) + # Keep record of the largest slice + if isinstance(y_indices, slice): + y_indices_size = np.sum(y_indices.stop - y_indices.start) + else: # is tuple of indices + y_indices_size = len(y_indices) + if y_indices_size > max_y_indices: + max_y_indices = y_indices_size + + weights_i = [] + for i, (x_0, x_1) in enumerate(grid_x_bounds): + # Reverse lower and upper if dest grid is decreasing. + if grid_x_decreasing: + x_0, x_1 = x_1, x_0 + x_bounds = cached_x_bounds[i] + x_indices = cached_x_indices[i] + + # Determine whether element i, j overlaps with src and hence + # an area weight should be computed. + # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case + # of wrapped longitudes. However if the src grid is not global + # (i.e. circular) this new cell would include a region outside of + # the extent of the src grid and thus the weight is therefore + # invalid. + outside_extent = x_0 > x_1 and not circular + if ( + outside_extent + or not y_within_bounds[j] + or not x_within_bounds[i] + ): + weights = False + else: + # Calculate weights based on areas of cropped bounds. + if isinstance(x_indices, tuple) and isinstance( + y_indices, tuple + ): + raise RuntimeError( + "Cannot handle split bounds " "in both x and y." 
+ ) + weights = area_func(y_bounds, x_bounds) + weights_i.append(weights) + cached_weights.append(weights_i) + return ( + tuple(cached_x_indices), + tuple(cached_y_indices), + max_x_indices, + max_y_indices, + tuple(cached_weights), + ) + + ( + cached_x_indices, + cached_y_indices, + max_x_indices, + max_y_indices, + cached_weights, + ) = _calculate_regrid_area_weighted_weights( + src_x_bounds, + src_y_bounds, + grid_x_bounds, + grid_y_bounds, + grid_x_decreasing, + grid_y_decreasing, + area_func, + circular, + ) + + # Go further, calculating the full weights array that we'll need in the + # perform step and the indices we'll need to extract from the cube we're + # regridding (src_data) + + result_y_extent = len(grid_y_bounds) + result_x_extent = len(grid_x_bounds) + + # Total number of points + num_target_pts = result_y_extent * result_x_extent + + # Create empty array to hold weights + src_area_weights = np.zeros( + list((max_y_indices, max_x_indices, num_target_pts)) + ) + + # Built for the case where the source cube isn't masked + blank_weights = np.zeros((num_target_pts,)) + new_data_mask_basis = np.full( + (len(cached_y_indices), len(cached_x_indices)), False, dtype=np.bool_ + ) + + # To permit fancy indexing, we need to store our data in an array whose + # first two dimensions represent the indices needed for the target cell. + # Since target cells can require a different number of indices, the size of + # these dimensions should be the maximum of this number. + # This means we need to track whether the data in + # that array is actually required and build those squared-off arrays + # TODO: Consider if a proper mask would be better + src_area_datas_required = np.full( + (max_y_indices, max_x_indices, num_target_pts), False + ) + square_data_indices_y = np.zeros( + (max_y_indices, max_x_indices, num_target_pts), dtype=int + ) + square_data_indices_x = np.zeros( + (max_y_indices, max_x_indices, num_target_pts), dtype=int + ) + + # Stack the weights for each target point and build the indices we'll need + # to extract the src_area_data + target_pt_ji = -1 + for j, y_indices in enumerate(cached_y_indices): + for i, x_indices in enumerate(cached_x_indices): + target_pt_ji += 1 + # Determine whether to mask element i, j based on whether + # there are valid weights. + weights = cached_weights[j][i] + if weights is False: + # Prepare for the src_data not being masked by storing the + # information that will let us fill the data with zeros and + # weights as one. The weighted average result will be the same, + # but we avoid dividing by zero. 
+ blank_weights[target_pt_ji] = True + new_data_mask_basis[j, i] = True + else: + # Establish which indices are actually in y_indices and x_indices + if isinstance(y_indices, slice): + y_indices = list( + range( + y_indices.start, + y_indices.stop, + y_indices.step or 1, + ) + ) + else: + y_indices = list(y_indices) + + if isinstance(x_indices, slice): + x_indices = list( + range( + x_indices.start, + x_indices.stop, + x_indices.step or 1, + ) + ) + else: + x_indices = list(x_indices) + + # For the weights, we just need the lengths of these as we're + # dropping them into a pre-made array + + len_y = len(y_indices) + len_x = len(x_indices) + + src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights + + # To build the indices for the source cube, we need equal + # shaped array so we pad with 0s and record the need to mask + # them in src_area_datas_required + padded_y_indices = y_indices + [0] * (max_y_indices - len_y) + padded_x_indices = x_indices + [0] * (max_x_indices - len_x) + + square_data_indices_y[..., target_pt_ji] = np.array( + padded_y_indices + )[:, np.newaxis] + square_data_indices_x[..., target_pt_ji] = padded_x_indices + + src_area_datas_required[0:len_y, 0:len_x, target_pt_ji] = True + + # Package up the return data + + weights_info = ( + blank_weights, + src_area_weights, + new_data_mask_basis, + ) + + index_info = ( + result_x_extent, + result_y_extent, + square_data_indices_y, + square_data_indices_x, + src_area_datas_required, + ) + + # Now return it + + return ( + src_x, + src_y, + src_x_dim, + src_y_dim, + grid_x, + grid_y, + meshgrid_x, + meshgrid_y, + weights_info, + index_info, + ) + + +def _regrid_area_weighted_rectilinear_src_and_grid__perform( + src_cube, regrid_info, mdtol +): + """ + Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'. + + Perform the prepared regrid calculation on a single 2d cube. + + """ + ( + src_x, + src_y, + src_x_dim, + src_y_dim, + grid_x, + grid_y, + meshgrid_x, + meshgrid_y, + weights_info, + index_info, + ) = regrid_info + + # Calculate new data array for regridded cube. + regrid = functools.partial( + _regrid_area_weighted_array, + x_dim=src_x_dim, + y_dim=src_y_dim, + weights_info=weights_info, + index_info=index_info, + mdtol=mdtol, + ) + + new_data = map_complete_blocks( + src_cube, regrid, (src_y_dim, src_x_dim), meshgrid_x.shape + ) + + # Wrap up the data as a Cube. + regrid_callback = RectilinearRegridder._regrid + new_cube = RectilinearRegridder._create_cube( + new_data, + src_cube, + src_x_dim, + src_y_dim, + src_x, + src_y, + grid_x, + grid_y, + meshgrid_x, + meshgrid_y, + regrid_callback, + ) + + # Slice out any length 1 dimensions. 
+ indices = [slice(None, None)] * new_data.ndim + if src_x_dim is not None and new_cube.shape[src_x_dim] == 1: + indices[src_x_dim] = 0 + if src_y_dim is not None and new_cube.shape[src_y_dim] == 1: + indices[src_y_dim] = 0 + if 0 in indices: + new_cube = new_cube[tuple(indices)] + + return new_cube diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 127aec7c1e9..0b52f54568e 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -120,7 +120,7 @@ def _angle(p, q, r): mid_lons = np.deg2rad(q[0]) pr = _3d_xyz_from_latlon(r[0], r[1]) - _3d_xyz_from_latlon(p[0], p[1]) - pr_norm = np.sqrt(np.sum(pr ** 2, axis=0)) + pr_norm = np.sqrt(np.sum(pr**2, axis=0)) pr_top = pr[1] * np.cos(mid_lons) - pr[0] * np.sin(mid_lons) index = pr_norm == 0 diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index c6b33c56a4d..fc642497292 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -229,7 +229,7 @@ def compute_interp_weights(self, xi, method=None): xi_shape, method, indices, norm_distances, out_of_bounds = prepared # Allocate arrays for describing the sparse matrix. - n_src_values_per_result_value = 2 ** ndim + n_src_values_per_result_value = 2**ndim n_result_values = len(indices[0]) n_non_zero = n_result_values * n_src_values_per_result_value weights = np.ones(n_non_zero, dtype=norm_distances[0].dtype) diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 409782f256a..4630f47967f 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -629,14 +629,10 @@ def curl(i_cube, j_cube, k_cube=None): # (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi) # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta) # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube) - if ( - y_coord.name() - not in [ - "latitude", - "grid_latitude", - ] - or x_coord.name() not in ["longitude", "grid_longitude"] - ): + if y_coord.name() not in [ + "latitude", + "grid_latitude", + ] or x_coord.name() not in ["longitude", "grid_longitude"]: raise ValueError( "Expecting latitude as the y coord and " "longitude as the x coord for spherical curl." diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 373487af532..f704468e332 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -335,7 +335,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): raise ValueError("Bounds must be [n,2] array") # fill in a new array of areas - radius_sqr = radius_of_earth ** 2 + radius_sqr = radius_of_earth**2 radian_lat_64 = radian_lat_bounds.astype(np.float64) radian_lon_64 = radian_lon_bounds.astype(np.float64) @@ -1010,8 +1010,8 @@ def _transform_distance_vectors_tolerance_mask( # Squared magnitudes should be equal to one within acceptable tolerance. # A value of atol=2e-3 is used, which corresponds to a change in magnitude # of approximately 0.1%. 
- sqmag_1_0 = u_one_t ** 2 + v_zero_t ** 2 - sqmag_0_1 = u_zero_t ** 2 + v_one_t ** 2 + sqmag_1_0 = u_one_t**2 + v_zero_t**2 + sqmag_0_1 = u_zero_t**2 + v_one_t**2 mask = np.logical_not( np.logical_and( np.isclose(sqmag_1_0, ones, atol=2e-3), diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 571a66b756b..1cbc90cc60a 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -19,6 +19,7 @@ import numpy as np from numpy import ma +from iris._deprecation import warn_deprecated import iris.analysis from iris.common import SERVICES, Resolve from iris.common.lenient import _lenient_client @@ -138,10 +139,35 @@ def intersection_of_cubes(cube, other_cube): An instance of :class:`iris.cube.Cube`. Returns: - A pair of :class:`iris.cube.Cube` instances in a tuple corresponding - to the original cubes restricted to their intersection. + A pair of :class:`iris.cube.Cube` instances in a tuple corresponding to + the original cubes restricted to their intersection. + + .. deprecated:: 3.2.0 + + Instead use :meth:`iris.cube.CubeList.extract_overlapping`. For example, + rather than calling + + .. code:: + + cube1, cube2 = intersection_of_cubes(cube1, cube2) + + replace with + + .. code:: + + cubes = CubeList([cube1, cube2]) + coords = ["latitude", "longitude"] # Replace with relevant coords + intersections = cubes.extract_overlapping(coords) + cube1, cube2 = (intersections[0], intersections[1]) """ + wmsg = ( + "iris.analysis.maths.intersection_of_cubes has been deprecated and will " + "be removed, please use iris.cube.CubeList.extract_overlapping " + "instead. See intersection_of_cubes docstring for more information." + ) + warn_deprecated(wmsg) + # Take references of the original cubes (which will be copied when # slicing later). 
new_cube_self = cube @@ -514,7 +540,7 @@ def power(data, out=None): return _math_op_common( cube, power, - cube.units ** exponent, + cube.units**exponent, new_dtype=new_dtype, in_place=in_place, ) diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 89dde1818bc..711e3c5bfbb 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -168,10 +168,10 @@ def _ones_like(cube): covar = (s1 * s2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol ) - var_1 = (s1 ** 2).collapsed( + var_1 = (s1**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1 ) - var_2 = (s2 ** 2).collapsed( + var_2 = (s2**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_2 ) diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 29ae910e386..cb5f53f5f43 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -204,7 +204,7 @@ def func(field): lambda field: field not in ( "circular", - "src_dim", + "location_axis", "node_dimension", "edge_dimension", "face_dimension", diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 6eb79a65f90..12db64cafed 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -112,10 +112,10 @@ class Resolve: Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(cube2) air_temperature / (K) (longitude: 49; latitude: 37) @@ -130,10 +130,10 @@ class Resolve: Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario E1 + Conventions 'CF-1.5' + Model scenario 'E1' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(data.shape) (240, 37, 49) @@ -153,9 +153,9 @@ class Resolve: Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' Secondly, creating an *empty* ``resolver`` instance, that may be called *multiple* times with *different* :class:`~iris.cube.Cube` operands and *different* ``data``, @@ -2413,10 +2413,10 @@ def mapped(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(cube2) air_temperature / (K) (longitude: 49; latitude: 37) Dimension coordinates: @@ -2430,10 +2430,10 @@ def mapped(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario E1 + Conventions 'CF-1.5' + Model scenario 'E1' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> Resolve().mapped is None True >>> resolver = Resolve(cube1, cube2) @@ -2481,10 +2481,10 @@ def shape(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(cube2) air_temperature / (K) (longitude: 49; latitude: 37) Dimension coordinates: @@ -2498,10 +2498,10 @@ def shape(self): Cell methods: mean time (6 hour) Attributes: - 
Conventions CF-1.5 - Model scenario E1 + Conventions 'CF-1.5' + Model scenario 'E1' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> Resolve().shape is None True >>> Resolve(cube1, cube2).shape diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 300f49014a5..311ed35f44e 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -297,7 +297,10 @@ def as_cartopy_crs(self): return ccrs.Geodetic(self.as_cartopy_globe()) def as_cartopy_projection(self): - return ccrs.PlateCarree() + return ccrs.PlateCarree( + central_longitude=self.longitude_of_prime_meridian, + globe=self.as_cartopy_globe(), + ) def as_cartopy_globe(self): # Explicitly set `ellipse` to None as a workaround for @@ -1080,6 +1083,8 @@ def __init__( longitude_of_projection_origin=None, ellipsoid=None, standard_parallel=None, + false_easting=None, + false_northing=None, ): """ Constructs a Mercator coord system. @@ -1095,6 +1100,12 @@ def __init__( * standard_parallel: The latitude where the scale is 1. Defaults to 0.0 . + * false_easting: + X offset from the planar origin in metres. Defaults to 0.0. + + * false_northing: + Y offset from the planar origin in metres. Defaults to 0.0. + """ #: True longitude of planar origin in degrees. self.longitude_of_projection_origin = _arg_default( @@ -1107,12 +1118,20 @@ def __init__( #: The latitude where the scale is 1. self.standard_parallel = _arg_default(standard_parallel, 0) + #: X offset from the planar origin in metres. + self.false_easting = _arg_default(false_easting, 0) + + #: Y offset from the planar origin in metres. + self.false_northing = _arg_default(false_northing, 0) + def __repr__(self): res = ( "Mercator(longitude_of_projection_origin=" "{self.longitude_of_projection_origin!r}, " "ellipsoid={self.ellipsoid!r}, " - "standard_parallel={self.standard_parallel!r})" + "standard_parallel={self.standard_parallel!r}, " + "false_easting={self.false_easting!r}, " + "false_northing={self.false_northing!r})" ) return res.format(self=self) @@ -1123,6 +1142,8 @@ def as_cartopy_crs(self): central_longitude=self.longitude_of_projection_origin, globe=globe, latitude_true_scale=self.standard_parallel, + false_easting=self.false_easting, + false_northing=self.false_northing, ) def as_cartopy_projection(self): diff --git a/lib/iris/coords.py b/lib/iris/coords.py index db193d00462..b236d407dae 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -10,7 +10,7 @@ from abc import ABCMeta, abstractmethod from collections import namedtuple -from collections.abc import Iterator +from collections.abc import Container, Iterator import copy from itertools import chain, zip_longest import operator @@ -57,6 +57,10 @@ class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta): _MODE_RDIV: "/", } + # Used by printout methods : __str__ and __repr__ + # Overridden in subclasses : Coord->'points', Connectivity->'indices' + _values_array_name = "data" + @abstractmethod def __init__( self, @@ -268,78 +272,332 @@ def _has_lazy_values(self): """ return self._values_dm.has_lazy_data() - def _repr_other_metadata(self): - fmt = "" - if self.long_name: - fmt = ", long_name={self.long_name!r}" - if self.var_name: - fmt += ", var_name={self.var_name!r}" - if len(self.attributes) > 0: - fmt += ", attributes={self.attributes}" - result = fmt.format(self=self) - return result + def summary( + self, + shorten=False, + max_values=None, + edgeitems=2, + linewidth=None, + precision=None, + convert_dates=True, + 
_section_indices=None, + ): + r""" + Make a printable text summary. + + Parameters + ---------- + shorten : bool, default = False + If True, produce an abbreviated one-line summary. + If False, produce a multi-line summary, with embedded newlines. + max_values : int or None, default = None + If more than this many data values, print truncated data arrays + instead of full contents. + If 0, print only the shape. + The default is 5 if :attr:`shorten`\ =True, or 15 otherwise. + This overrides ``numpy.get_printoptions['threshold']``\ . + linewidth : int or None, default = None + Character-width controlling line splitting of array outputs. + If unset, defaults to ``numpy.get_printoptions['linewidth']``\ . + edgeitems : int = 2 + Controls truncated array output. + Overrides ``numpy.getprintoptions['edgeitems']``\ . + precision : int or None, default = None + Controls number decimal formatting. + When :attr:`shorten`\ =True this is defaults to 3, in which case it + overrides ``numpy.get_printoptions()['precision']``\ . + convert_dates : bool, default = True + If the units has a calendar, then print array values as date + strings instead of the actual numbers. + + Returns + ------- + result : str + Output text, with embedded newlines when :attr:`shorten`\ =False. - def _str_dates(self, dates_as_numbers): - date_obj_array = self.units.num2date(dates_as_numbers) - kwargs = {"separator": ", ", "prefix": " "} - return np.core.arrayprint.array2string( - date_obj_array, formatter={"all": str}, **kwargs - ) - def __str__(self): - # Note: this method includes bounds handling code, but it only runs - # within Coord type instances, as only these allow bounds to be set. - if self.units.is_time_reference(): - fmt = ( - "{cls}({values}{bounds}" - ", standard_name={self.standard_name!r}" - ", calendar={self.units.calendar!r}{other_metadata})" + .. note:: + Arrays are formatted using :meth:`numpy.array2string`. Some aspects + of the array formatting are controllable in the usual way, via + :meth:`numpy.printoptions`, but others are overridden as detailed + above. + Control of those aspects is still available, but only via the call + arguments. + + """ + # NOTE: the *private* key "_section_indices" can be set to a dict, to + # return details of which (line, character) each particular section of + # the output text begins at. + # Currently only used by MeshCoord.summary(), which needs this info to + # modify the result string, for idiosyncratic reasons. + + def array_summary(data, n_max, n_edge, linewidth, precision): + # Return a text summary of an array. + # Take account of strings, dates and masked data. + result = "" + formatter = None + if convert_dates and self.units.is_time_reference(): + # Account for dates, if enabled. + # N.B. a time unit with a long time interval ("months" + # or "years") cannot be converted to a date using + # `num2date`, so gracefully fall back to printing + # values as numbers. + if not self.units.is_long_time_interval(): + # Otherwise ... replace all with strings. + if ma.is_masked(data): + mask = data.mask + else: + mask = None + data = np.array(self.units.num2date(data)) + data = data.astype(str) + # Masked datapoints do not survive num2date. + if mask is not None: + data = np.ma.masked_array(data, mask) + + if ma.is_masked(data): + # Masks are not handled by np.array2string, whereas + # MaskedArray.__str__ is using a private method to convert to + # objects. + # Our preferred solution is to convert to strings *and* fill + # with '--'. 
This is not ideal because numbers will not align + # with a common numeric format, but there is no *public* logic + # in numpy to arrange that, so let's not overcomplicate. + # It happens that array2string *also* does not use a common + # format (width) for strings, but we fix that below... + data = data.astype(str).filled("--") + + if data.dtype.kind == "U": + # Strings : N.B. includes all missing data + # find the longest. + length = max(len(str(x)) for x in data.flatten()) + # Pre-apply a common formatting width. + formatter = {"all": lambda x: str(x).ljust(length)} + + result = np.array2string( + data, + separator=", ", + edgeitems=n_edge, + threshold=n_max, + max_line_width=linewidth, + formatter=formatter, + precision=precision, ) - if self.units.is_long_time_interval(): - # A time unit with a long time interval ("months" or "years") - # cannot be converted to a date using `num2date` so gracefully - # fall back to printing points as numbers, not datetimes. - values = self._values + + return result + + units_str = str(self.units) + if self.units.calendar and not shorten: + units_str += f", {self.units.calendar} calendar" + title_str = f"{self.name()} / ({units_str})" + cls_str = type(self).__name__ + shape_str = str(self.shape) + + # Implement conditional defaults for control args. + if max_values is None: + max_values = 5 if shorten else 15 + precision = 3 if shorten else None + n_indent = 4 + indent = " " * n_indent + newline_indent = "\n" + indent + if linewidth is not None: + given_array_width = linewidth + else: + given_array_width = np.get_printoptions()["linewidth"] + using_array_width = given_array_width - n_indent * 2 + # Make a printout of the main data array (or maybe not, if lazy). + if self._has_lazy_values(): + data_str = "" + elif max_values == 0: + data_str = "[...]" + else: + data_str = array_summary( + self._values, + n_max=max_values, + n_edge=edgeitems, + linewidth=using_array_width, + precision=precision, + ) + + # The output under construction, divided into lines for convenience. + output_lines = [""] + + def add_output(text, section=None): + # Append output text and record locations of named 'sections' + if section and _section_indices is not None: + # defined a named 'section', recording the current line number + # and character position as its start position + i_line = len(output_lines) - 1 + i_char = len(output_lines[-1]) + _section_indices[section] = (i_line, i_char) + # Split the text-to-add into lines + lines = text.split("\n") + # Add initial text (before first '\n') to the current line + output_lines[-1] += lines[0] + # Add subsequent lines as additional output lines + for line in lines[1:]: + output_lines.append(line) # Add new lines + + if shorten: + add_output(f"<{cls_str}: ") + add_output(f"{title_str} ", section="title") + + if data_str != "": + # Flatten to a single line, reducing repeated spaces. + def flatten_array_str(array_str): + array_str = array_str.replace("\n", " ") + array_str = array_str.replace("\t", " ") + while " " in array_str: + array_str = array_str.replace(" ", " ") + return array_str + + data_str = flatten_array_str(data_str) + # Adjust maximum-width to allow for the title width in the + # repr form. + current_line_len = len(output_lines[-1]) + using_array_width = given_array_width - current_line_len + # Work out whether to include a summary of the data values + if len(data_str) > using_array_width: + # Make one more attempt, printing just the *first* point, + # as this is useful for dates. 
+                    data_str = array_summary(
+                        self._values[:1],
+                        n_max=max_values,
+                        n_edge=edgeitems,
+                        linewidth=using_array_width,
+                        precision=precision,
+                    )
+                    data_str = flatten_array_str(data_str)
+                    data_str = data_str[:-1] + ", ...]"
+                    if len(data_str) > using_array_width:
+                        # Data summary is still too long : replace with array
+                        # "placeholder" representation.
+                        data_str = "[...]"
+
+            if self.has_bounds():
+                data_str += "+bounds"
+
+            if self.shape != (1,):
+                # Anything non-scalar : show shape as well.
+                data_str += f" shape{shape_str}"
+
+            # single-line output in 'shorten' mode
+            add_output(f"{data_str}>", section="data")
+
+        else:
+            # Long (multi-line) output format.
+            add_output(f"{cls_str} : ")
+            add_output(f"{title_str}", section="title")
+
+            def reindent_data_string(text, n_indent):
+                lines = [line for line in text.split("\n")]
+                indent = " " * (n_indent - 1)  # allow 1 for the initial '['
+                # Indent all but the *first* line.
+                line_1, rest_lines = lines[0], lines[1:]
+                rest_lines = ["\n" + indent + line for line in rest_lines]
+                result = line_1 + "".join(rest_lines)
+                return result
+
+            data_array_str = reindent_data_string(data_str, 2 * n_indent)
+
+            # NOTE: actual section name is variable here : data/points/indices
+            data_text = f"{self._values_array_name}: "
+            if "\n" in data_array_str:
+                # Put initial '[' here, and the rest on subsequent lines
+                data_text += "[" + newline_indent + indent + data_array_str[1:]
             else:
-                values = self._str_dates(self._values)
-            bounds = ""
+                # All on one line
+                data_text += data_array_str
+
+            # N.B. indent section and record section start after that
+            add_output(newline_indent)
+            add_output(data_text, section="data")
+
             if self.has_bounds():
-                if self.units.is_long_time_interval():
-                    bounds_vals = self.bounds
+                # Add a bounds section : basically just like the 'data'.
+                if self._bounds_dm.has_lazy_data():
+                    bounds_array_str = "<lazy>"
+                elif max_values == 0:
+                    bounds_array_str = "[...]"
                 else:
-                    bounds_vals = self._str_dates(self.bounds)
-                bounds = ", bounds={vals}".format(vals=bounds_vals)
-            result = fmt.format(
-                self=self,
-                cls=type(self).__name__,
-                values=values,
-                bounds=bounds,
-                other_metadata=self._repr_other_metadata(),
-            )
-        else:
-            result = repr(self)
+                    bounds_array_str = array_summary(
+                        self._bounds_dm.data,
+                        n_max=max_values,
+                        n_edge=edgeitems,
+                        linewidth=using_array_width,
+                        precision=precision,
+                    )
+                bounds_array_str = reindent_data_string(
+                    bounds_array_str, 2 * n_indent
+                )

-        return result
+                bounds_text = "bounds: "
+                if "\n" in bounds_array_str:
+                    # Put initial '[' here, and the rest on subsequent lines
+                    bounds_text += (
+                        "[" + newline_indent + indent + bounds_array_str[1:]
+                    )
+                else:
+                    # All on one line
+                    bounds_text += bounds_array_str
+
+                # N.B. indent section and record section start after that
+                add_output(newline_indent)
+                add_output(bounds_text, section="bounds")
+
+            if self.has_bounds():
+                shape_str += f" bounds{self._bounds_dm.shape}"
+
+            # Add shape section (always)
+            add_output(newline_indent)
+            add_output(f"shape: {shape_str}", section="shape")
+
+            # Add dtype section (always)
+            add_output(newline_indent)
+            add_output(f"dtype: {self.dtype}", section="dtype")
+
+            for name in self._metadata_manager._fields:
+                if name == "units":
+                    # This was already included in the header line
+                    continue
+                val = getattr(self, name, None)
+                if isinstance(val, Container):
+                    # Don't print empty containers, like attributes={}
+                    show = bool(val)
+                else:
+                    # Don't print properties when not present, or set to None,
+                    # or False. 
+                    # This works OK as long as we are happy to treat all
+                    # boolean properties as 'off' when False, which happens to
+                    # work for all those defined so far.
+                    show = val is not None and val is not False
+                if show:
+                    if name == "attributes":
+                        # Use a multi-line form for this.
+                        add_output(newline_indent)
+                        add_output("attributes:", section="attributes")
+                        max_attname_len = max(len(attr) for attr in val.keys())
+                        for attrname, attrval in val.items():
+                            attrname = attrname.ljust(max_attname_len)
+                            if isinstance(attrval, str):
+                                # quote strings
+                                attrval = repr(attrval)
+                                # and abbreviate really long ones
+                                attrval = iris.util.clip_string(attrval)
+                            attr_string = f"{attrname} {attrval}"
+                            add_output(newline_indent + indent + attr_string)
+                    else:
+                        # add a one-line section for this property
+                        # (aka metadata field)
+                        add_output(newline_indent)
+                        add_output(f"{name}: {val!r}", section=name)
+
+        return "\n".join(output_lines)
+
+    def __str__(self):
+        return self.summary()

     def __repr__(self):
-        # Note: this method includes bounds handling code, but it only runs
-        # within Coord type instances, as only these allow bounds to be set.
-        fmt = (
-            "{cls}({self._values!r}{bounds}"
-            ", standard_name={self.standard_name!r}, units={self.units!r}"
-            "{other_metadata})"
-        )
-        bounds = ""
-        # if coordinate, handle the bounds
-        if self.has_bounds():
-            bounds = ", bounds=" + repr(self.bounds)
-        result = fmt.format(
-            self=self,
-            cls=type(self).__name__,
-            bounds=bounds,
-            other_metadata=self._repr_other_metadata(),
-        )
-        return result
+        return self.summary(shorten=True)

     def __eq__(self, other):
         # Note: this method includes bounds handling code, but it only runs
@@ -861,23 +1119,6 @@ def measure(self, measure):
             raise ValueError(emsg)
         self._metadata_manager.measure = measure

-    def __str__(self):
-        result = repr(self)
-        return result
-
-    def __repr__(self):
-        fmt = (
-            "{cls}({self.data!r}, "
-            "measure={self.measure!r}, standard_name={self.standard_name!r}, "
-            "units={self.units!r}{other_metadata})"
-        )
-        result = fmt.format(
-            self=self,
-            cls=type(self).__name__,
-            other_metadata=self._repr_other_metadata(),
-        )
-        return result
-
     def cube_dims(self, cube):
         """
         Return the cube dimensions of this CellMeasure.
@@ -1303,6 +1544,8 @@ class Coord(_DimensionalMetadata):

     """

+    _values_array_name = "points"
+
     @abstractmethod
     def __init__(
         self,
@@ -1603,14 +1846,6 @@ def has_lazy_bounds(self):
         result = self._bounds_dm.has_lazy_data()
         return result

-    def _repr_other_metadata(self):
-        result = super()._repr_other_metadata()
-        if self.coord_system:
-            result += ", coord_system={}".format(self.coord_system)
-        if self.climatological:
-            result += ", climatological={}".format(self.climatological)
-        return result
-
     # Must supply __hash__ as Python 3 does not enable it if __eq__ is defined.
     # NOTE: Violates "objects which compare equal must have the same hash".
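Stepping back from the hunks above: the practical effect is that ``__str__`` and ``__repr__`` now share the single ``summary()`` code path. A minimal sketch of the two output forms, assuming a trivially constructed coordinate (the name and values here are invented, and the exact rendered spacing may differ):

.. code::

    import numpy as np
    from iris.coords import AuxCoord

    coord = AuxCoord(np.arange(12.0), long_name="level", units="1")

    # repr() delegates to summary(shorten=True): a single line.
    print(repr(coord))

    # str() delegates to summary(): a multi-line report with points,
    # shape and dtype sections, truncating the array at max_values.
    print(coord.summary(max_values=4))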
# We ought to remove this, as equality of two coords can *change*, so they @@ -2512,12 +2747,6 @@ def collapsed(self, dims_to_collapse=None): coord.circular = False return coord - def _repr_other_metadata(self): - result = Coord._repr_other_metadata(self) - if self.circular: - result += ", circular=%r" % self.circular - return result - def _new_points_requirements(self, points): """ Confirm that a new set of coord points adheres to the requirements for diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 3e1e98d12d2..b456bd9663a 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -745,7 +745,7 @@ class Cube(CFVariableMixin): mean over years time Attributes: STASH m01s16i203 - source Data from Met Office Unified Model + source 'Data from Met Office Unified Model' See the :doc:`user guide` for more information. @@ -1451,39 +1451,53 @@ def coord_dims(self, coord): The (name of the) coord to look for. """ - - coord = self.coord(coord) - - # Search for existing coordinate (object) on the cube, faster lookup - # than equality - makes no functional difference. - matches = [ - (dim,) - for coord_, dim in self._dim_coords_and_dims - if coord_ is coord - ] - if not matches: - matches = [ - dims - for coord_, dims in self._aux_coords_and_dims - if coord_ is coord - ] - - # Search derived aux coords - if not matches: + name_provided = False + if isinstance(coord, str): + # Forced to look-up the coordinate if we only have the name. + coord = self.coord(coord) + name_provided = True + + coord_id = id(coord) + + # Dimension of dimension coordinate by object id + dims_by_id = {id(c): (d,) for c, d in self._dim_coords_and_dims} + # Check for id match - faster than equality check + match = dims_by_id.get(coord_id) + + if match is None: + # Dimension/s of auxiliary coordinate by object id + aux_dims_by_id = {id(c): d for c, d in self._aux_coords_and_dims} + # Check for id match - faster than equality + match = aux_dims_by_id.get(coord_id) + if match is None: + dims_by_id.update(aux_dims_by_id) + + if match is None and not name_provided: + # We may have an equivalent coordinate but not the actual + # cube coordinate instance - so forced to perform coordinate + # lookup to attempt to retrieve it + coord = self.coord(coord) + # Check for id match - faster than equality + match = dims_by_id.get(id(coord)) + + # Search derived aux coordinates + if match is None: target_metadata = coord.metadata - def match(factory): + def matcher(factory): return factory.metadata == target_metadata - factories = filter(match, self._aux_factories) + factories = filter(matcher, self._aux_factories) matches = [ factory.derived_dims(self.coord_dims) for factory in factories ] + if matches: + match = matches[0] - if not matches: + if match is None: raise iris.exceptions.CoordinateNotFoundError(coord.name()) - return matches[0] + return match def cell_measure_dims(self, cell_measure): """ @@ -3636,7 +3650,7 @@ def collapsed(self, coords, aggregator, **kwargs): mean month, year mean longitude Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i024 @@ -3871,7 +3885,7 @@ def aggregated_by(self, coords, aggregator, **kwargs): mean month, year mean year Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i024 """ @@ -4076,8 +4090,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Attributes: STASH m01s00i024 source \ -Data from Met Office Unified Model - um_version 7.6 +'Data from Met Office Unified Model' + um_version '7.6' >>> print(air_press.rolling_window('time', 
iris.analysis.MEAN, 3)) @@ -4102,8 +4116,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Attributes: STASH m01s00i024 source \ -Data from Met Office Unified Model - um_version 7.6 +'Data from Met Office Unified Model' + um_version '7.6' Notice that the forecast_period dimension now represents the 4 possible windows of size 3 from the original cube. @@ -4234,7 +4248,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): dates or times may optionally be supplied as datetime.datetime or cftime.datetime instances. * scheme: - The type of interpolation to use to interpolate from this + An instance of the type of interpolation to use to interpolate from this :class:`~iris.cube.Cube` to the given sample points. The interpolation schemes currently available in Iris are: @@ -4263,8 +4277,11 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (time: 3; model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(cube.coord('time')) - DimCoord([2009-11-19 10:00:00, 2009-11-19 11:00:00, \ -2009-11-19 12:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] + shape: (3,) + dtype: float64 + standard_name: 'time' >>> print(cube.coord('time').points) [349618. 349619. 349620.] >>> samples = [('time', 349618.5)] @@ -4273,8 +4290,11 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result.coord('time')) - DimCoord([2009-11-19 10:30:00], standard_name='time', \ -calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:30:00] + shape: (1,) + dtype: float64 + standard_name: 'time' >>> print(result.coord('time').points) [349618.5] >>> # For datetime-like coordinates, we can also use @@ -4285,8 +4305,11 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result2.coord('time')) - DimCoord([2009-11-19 10:30:00], standard_name='time', \ -calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:30:00] + shape: (1,) + dtype: float64 + standard_name: 'time' >>> print(result2.coord('time').points) [349618.5] >>> print(result == result2) @@ -4307,7 +4330,7 @@ def regrid(self, grid, scheme): * grid: A :class:`~iris.cube.Cube` that defines the target grid. * scheme: - The type of regridding to use to regrid this cube onto the + An instance of the type of regridding to use to regrid this cube onto the target grid. The regridding schemes in Iris currently include: * :class:`iris.analysis.Linear`\*, diff --git a/lib/iris/experimental/equalise_cubes.py b/lib/iris/experimental/equalise_cubes.py deleted file mode 100644 index 8be71750673..00000000000 --- a/lib/iris/experimental/equalise_cubes.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Experimental cube-adjusting functions to assist merge operations. 
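For context on the module deleted just below: its only function was already a stub that raises an error redirecting to :func:`iris.util.equalise_attributes`. A minimal sketch of the replacement call, assuming ``cubes`` is an existing :class:`iris.cube.CubeList` whose merge is blocked by mismatched attributes:

.. code::

    from iris.util import equalise_attributes

    # Drop any attributes whose values differ between the cubes, so that
    # a subsequent merge/concatenate can succeed.
    equalise_attributes(cubes)
    merged = cubes.merge_cube()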
- -""" - - -def equalise_attributes(cubes): - """ - Delete cube attributes that are not identical over all cubes in a group. - - .. warning:: - - This function is now **disabled**. - - The functionality has been moved to - :func:`iris.util.equalise_attributes`. - - """ - old = "iris.experimental.equalise_cubes.equalise_attributes" - new = "iris.util.equalise_attributes" - emsg = ( - f'The function "{old}" has been moved.\n' - f'Please replace "{old}()" with "{new}()".' - ) - raise Exception(emsg) diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 83fd761973d..7c952934695 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -20,8 +20,17 @@ from osgeo import gdal, osr import iris +from iris._deprecation import warn_deprecated import iris.coord_systems +wmsg = ( + "iris.experimental.raster is deprecated since version 3.2, and will be " + "removed in a future release. If you make use of this functionality, " + "please contact the Iris Developers to discuss how to retain it (which may " + "involve reversing the deprecation)." +) +warn_deprecated(wmsg) + _GDAL_DATATYPES = { "i2": gdal.GDT_Int16, "i4": gdal.GDT_Int32, @@ -96,6 +105,14 @@ def export_geotiff(cube, fname): """ Writes cube data to raster file format as a PixelIsArea GeoTiff image. + .. deprecated:: 3.2.0 + + This method is scheduled to be removed in a future release, and no + replacement is currently planned. + If you make use of this functionality, please contact the Iris + Developers to discuss how to retain it (which could include reversing + the deprecation). + Args: * cube (Cube): The 2D regularly gridded cube slice to be exported. The cube must have regular, contiguous bounds. @@ -107,6 +124,13 @@ def export_geotiff(cube, fname): http://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2 """ + wmsg = ( + "iris.experimental.raster.export_geotiff has been deprecated, and will " + "be removed in a future release. Please consult the docstring for " + "details." + ) + warn_deprecated(wmsg) + if cube.ndim != 2: raise ValueError("The cube must be two dimensional.") diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 9a02fbd3b16..7c5d8e99cc7 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -6,26 +6,37 @@ """ Regridding functions. -""" +.. note:: + + .. deprecated:: 3.2.0 + + This package will be removed in a future release. + The PointInCell class has now moved to :class:`iris.analysis.PointInCell`. + All the other content will be withdrawn. + + If you still use any of this, please contact the Iris Developers to + discuss how to replace it or to retain it. 
+""" import copy import functools import warnings import cartopy.crs as ccrs -import cf_units import numpy as np -import numpy.ma as ma import scipy.interpolate -from iris._lazy_data import map_complete_blocks +from iris._deprecation import warn_deprecated +from iris.analysis._area_weighted import ( + _regrid_area_weighted_rectilinear_src_and_grid__perform, + _regrid_area_weighted_rectilinear_src_and_grid__prepare, +) from iris.analysis._interpolation import ( get_xy_coords, get_xy_dim_coords, snapshot_grid, ) from iris.analysis._regrid import ( - RectilinearRegridder, _regrid_weighted_curvilinear_to_rectilinear__perform, _regrid_weighted_curvilinear_to_rectilinear__prepare, ) @@ -34,564 +45,38 @@ import iris.cube from iris.util import _meshgrid +wmsg = ( + "The 'iris.experimental.regrid' package is deprecated since version 3.2, " + "and will be removed in a future release. The PointInCell class has now " + "moved into iris.analysis. All its other content will be withdrawn. " + "If you still use any of this, please contact the Iris Developers to " + "discuss how to replace it or to retain it (reverse the deprecation)." +) +warn_deprecated(wmsg) -def _get_xy_coords(cube): - """ - Return the x and y coordinates from a cube. - - This function will preferentially return a pair of dimension - coordinates (if there are more than one potential x or y dimension - coordinates a ValueError will be raised). If the cube does not have - a pair of x and y dimension coordinates it will return 1D auxiliary - coordinates (including scalars). If there is not one and only one set - of x and y auxiliary coordinates a ValueError will be raised. - - Having identified the x and y coordinates, the function checks that they - have equal coordinate systems and that they do not occupy the same - dimension on the cube. - - Args: - - * cube: - An instance of :class:`iris.cube.Cube`. - - Returns: - A tuple containing the cube's x and y coordinates. - - """ - # Look for a suitable dimension coords first. - x_coords = cube.coords(axis="x", dim_coords=True) - if not x_coords: - # If there is no x coord in dim_coords look for scalars or - # monotonic coords in aux_coords. - x_coords = [ - coord - for coord in cube.coords(axis="x", dim_coords=False) - if coord.ndim == 1 and coord.is_monotonic() - ] - if len(x_coords) != 1: - raise ValueError( - "Cube {!r} must contain a single 1D x " - "coordinate.".format(cube.name()) - ) - x_coord = x_coords[0] - - # Look for a suitable dimension coords first. - y_coords = cube.coords(axis="y", dim_coords=True) - if not y_coords: - # If there is no y coord in dim_coords look for scalars or - # monotonic coords in aux_coords. - y_coords = [ - coord - for coord in cube.coords(axis="y", dim_coords=False) - if coord.ndim == 1 and coord.is_monotonic() - ] - if len(y_coords) != 1: - raise ValueError( - "Cube {!r} must contain a single 1D y " - "coordinate.".format(cube.name()) - ) - y_coord = y_coords[0] - - if x_coord.coord_system != y_coord.coord_system: - raise ValueError( - "The cube's x ({!r}) and y ({!r}) " - "coordinates must have the same coordinate " - "system.".format(x_coord.name(), y_coord.name()) - ) - - # The x and y coordinates must describe different dimensions - # or be scalar coords. 
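Because the deprecation above is raised at import time via ``warn_deprecated`` (a thin wrapper over the standard :mod:`warnings` machinery), downstream test suites can promote it to an error to flush out lingering imports. A sketch, assuming the module has not already been imported earlier in the process:

.. code::

    import warnings

    with warnings.catch_warnings():
        # Turn warnings (including the Iris deprecation) into exceptions,
        # so any remaining use of the deprecated module fails fast.
        warnings.simplefilter("error")
        import iris.experimental.regrid  # noqa: F401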
- x_dims = cube.coord_dims(x_coord) - x_dim = None - if x_dims: - x_dim = x_dims[0] - - y_dims = cube.coord_dims(y_coord) - y_dim = None - if y_dims: - y_dim = y_dims[0] - - if x_dim is not None and y_dim == x_dim: - raise ValueError( - "The cube's x and y coords must not describe the " - "same data dimension." - ) - - return x_coord, y_coord - - -def _within_bounds(src_bounds, tgt_bounds, orderswap=False): - """ - Determine which target bounds lie within the extremes of the source bounds. - - Args: - - * src_bounds (ndarray): - An (n, 2) shaped array of monotonic contiguous source bounds. - * tgt_bounds (ndarray): - An (n, 2) shaped array corresponding to the target bounds. - - Kwargs: - - * orderswap (bool): - A Boolean indicating whether the target bounds are in descending order - (True). Defaults to False. - - Returns: - Boolean ndarray, indicating whether each target bound is within the - extremes of the source bounds. - - """ - min_bound = np.min(src_bounds) - 1e-14 - max_bound = np.max(src_bounds) + 1e-14 - - # Swap upper-lower is necessary. - if orderswap is True: - upper, lower = tgt_bounds.T - else: - lower, upper = tgt_bounds.T - - return ((lower <= max_bound) * (lower >= min_bound)) * ( - (upper <= max_bound) * (upper >= min_bound) - ) - - -def _cropped_bounds(bounds, lower, upper): - """ - Return a new bounds array and corresponding slice object (or indices) of - the original data array, resulting from cropping the provided bounds - between the specified lower and upper values. The bounds at the - extremities will be truncated so that they start and end with lower and - upper. - - This function will return an empty NumPy array and slice if there is no - overlap between the region covered by bounds and the region from lower to - upper. - - If lower > upper the resulting bounds may not be contiguous and the - indices object will be a tuple of indices rather than a slice object. - - Args: - - * bounds: - An (n, 2) shaped array of monotonic contiguous bounds. - * lower: - Lower bound at which to crop the bounds array. - * upper: - Upper bound at which to crop the bounds array. - - Returns: - A tuple of the new bounds array and the corresponding slice object or - indices from the zeroth axis of the original array. - - """ - reversed_flag = False - # Ensure order is increasing. - if bounds[0, 0] > bounds[-1, 0]: - # Reverse bounds - bounds = bounds[::-1, ::-1] - reversed_flag = True - - # Number of bounds. - n = bounds.shape[0] - - if lower <= upper: - if lower > bounds[-1, 1] or upper < bounds[0, 0]: - new_bounds = bounds[0:0] - indices = slice(0, 0) - else: - # A single region lower->upper. - if lower < bounds[0, 0]: - # Region extends below bounds so use first lower bound. - lindex = 0 - lower = bounds[0, 0] - else: - # Index of last lower bound less than or equal to lower. - lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] - if upper > bounds[-1, 1]: - # Region extends above bounds so use last upper bound. - uindex = n - 1 - upper = bounds[-1, 1] - else: - # Index of first upper bound greater than or equal to - # upper. - uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] - # Extract the bounds in our region defined by lower->upper. - new_bounds = np.copy(bounds[lindex : (uindex + 1), :]) - # Replace first and last values with specified bounds. 
- new_bounds[0, 0] = lower - new_bounds[-1, 1] = upper - if reversed_flag: - indices = slice(n - (uindex + 1), n - lindex) - else: - indices = slice(lindex, uindex + 1) - else: - # Two regions [0]->upper, lower->[-1] - # [0]->upper - if upper < bounds[0, 0]: - # Region outside src bounds. - new_bounds_left = bounds[0:0] - indices_left = tuple() - slice_left = slice(0, 0) - else: - if upper > bounds[-1, 1]: - # Whole of bounds. - uindex = n - 1 - upper = bounds[-1, 1] - else: - # Index of first upper bound greater than or equal to upper. - uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] - # Extract the bounds in our region defined by [0]->upper. - new_bounds_left = np.copy(bounds[0 : (uindex + 1), :]) - # Replace last value with specified bound. - new_bounds_left[-1, 1] = upper - if reversed_flag: - indices_left = tuple(range(n - (uindex + 1), n)) - slice_left = slice(n - (uindex + 1), n) - else: - indices_left = tuple(range(0, uindex + 1)) - slice_left = slice(0, uindex + 1) - # lower->[-1] - if lower > bounds[-1, 1]: - # Region is outside src bounds. - new_bounds_right = bounds[0:0] - indices_right = tuple() - slice_right = slice(0, 0) - else: - if lower < bounds[0, 0]: - # Whole of bounds. - lindex = 0 - lower = bounds[0, 0] - else: - # Index of last lower bound less than or equal to lower. - lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] - # Extract the bounds in our region defined by lower->[-1]. - new_bounds_right = np.copy(bounds[lindex:, :]) - # Replace first value with specified bound. - new_bounds_right[0, 0] = lower - if reversed_flag: - indices_right = tuple(range(0, n - lindex)) - slice_right = slice(0, n - lindex) - else: - indices_right = tuple(range(lindex, n)) - slice_right = slice(lindex, None) - - if reversed_flag: - # Flip everything around. - indices_left, indices_right = indices_right, indices_left - slice_left, slice_right = slice_right, slice_left - - # Combine regions. - new_bounds = np.concatenate((new_bounds_left, new_bounds_right)) - # Use slices if possible, but if we have two regions use indices. - if indices_left and indices_right: - indices = indices_left + indices_right - elif indices_left: - indices = slice_left - elif indices_right: - indices = slice_right - else: - indices = slice(0, 0) - - if reversed_flag: - new_bounds = new_bounds[::-1, ::-1] - - return new_bounds, indices - - -def _cartesian_area(y_bounds, x_bounds): - """ - Return an array of the areas of each cell given two arrays - of cartesian bounds. - - Args: - - * y_bounds: - An (n, 2) shaped NumPy array. - * x_bounds: - An (m, 2) shaped NumPy array. - - Returns: - An (n, m) shaped Numpy array of areas. - - """ - heights = y_bounds[:, 1] - y_bounds[:, 0] - widths = x_bounds[:, 1] - x_bounds[:, 0] - return np.abs(np.outer(heights, widths)) - - -def _spherical_area(y_bounds, x_bounds, radius=1.0): - """ - Return an array of the areas of each cell on a sphere - given two arrays of latitude and longitude bounds in radians. - - Args: - - * y_bounds: - An (n, 2) shaped NumPy array of latitide bounds in radians. - * x_bounds: - An (m, 2) shaped NumPy array of longitude bounds in radians. - * radius: - Radius of the sphere. Default is 1.0. - - Returns: - An (n, m) shaped Numpy array of areas. 
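As a cross-check on the deleted helper above, which defers to ``iris.analysis.cartography._quadrant_area``: the area of a latitude/longitude cell on a sphere comes from integrating the area element :math:`R^2 \cos\phi \,d\phi \,d\lambda`. An independent sketch of that formula, not the library routine:

.. code::

    import numpy as np

    def quadrant_area(lat_bounds, lon_bounds, radius=1.0):
        # lat_bounds: (n, 2) and lon_bounds: (m, 2), both in radians.
        # Spherical-rectangle area:
        #   R^2 * (sin(lat_2) - sin(lat_1)) * (lon_2 - lon_1)
        dsin = np.sin(lat_bounds[:, 1]) - np.sin(lat_bounds[:, 0])
        dlon = lon_bounds[:, 1] - lon_bounds[:, 0]
        return np.abs(radius ** 2 * np.outer(dsin, dlon))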
- - """ - return iris.analysis.cartography._quadrant_area(y_bounds, x_bounds, radius) - - -def _get_bounds_in_units(coord, units, dtype): - """Return a copy of coord's bounds in the specified units and dtype.""" - # The bounds are cast to dtype before conversion to prevent issues when - # mixing float32 and float64 types. - return coord.units.convert(coord.bounds.astype(dtype), units).astype(dtype) - - -def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0): - """ - Return the weighted mean of an array over the specified axis - using the provided weights (if any) and a permitted fraction of - masked data. - - Args: - - * data (array-like): - Data to be averaged. - - * weights (array-like): - An array of the same shape as the data that specifies the contribution - of each corresponding data element to the calculated mean. - - Kwargs: - - * axis (int or tuple of ints): - Axis along which the mean is computed. The default is to compute - the mean of the flattened array. - - * mdtol (float): - Tolerance of missing data. The value returned in each element of the - returned array will be masked if the fraction of masked data exceeds - mdtol. This fraction is weighted by the `weights` array if one is - provided. mdtol=0 means no missing data is tolerated - while mdtol=1 will mean the resulting element will be masked if and - only if all the contributing elements of data are masked. - Defaults to 0. - - Returns: - Numpy array (possibly masked) or scalar. - """ - if ma.is_masked(data): - res, unmasked_weights_sum = ma.average( - data, weights=weights, axis=axis, returned=True - ) - if mdtol < 1: - weights_sum = weights.sum(axis=axis) - frac_masked = 1 - np.true_divide(unmasked_weights_sum, weights_sum) - mask_pt = frac_masked > mdtol - if np.any(mask_pt) and not isinstance(res, ma.core.MaskedConstant): - if np.isscalar(res): - res = ma.masked - elif ma.isMaskedArray(res): - res.mask |= mask_pt - else: - res = ma.masked_array(res, mask=mask_pt) - else: - res = np.average(data, weights=weights, axis=axis) - return res - - -def _regrid_area_weighted_array( - src_data, x_dim, y_dim, weights_info, index_info, mdtol=0 +def regrid_area_weighted_rectilinear_src_and_grid( + src_cube, grid_cube, mdtol=0 ): """ - Regrid the given data from its source grid to a new grid using - an area weighted mean to determine the resulting data values. + Return a new cube with data values calculated using the area weighted + mean of data values from src_grid regridded onto the horizontal grid of + grid_cube. .. note:: - Elements in the returned array that lie either partially - or entirely outside of the extent of the source grid will - be masked irrespective of the value of mdtol. + .. deprecated:: 3.2.0 - Args: + This function is scheduled to be removed in a future release. + Please use :meth:`iris.cube.Cube.regrid` with the + :class:`iris.analysis.AreaWeighted` scheme instead : this is an exact + replacement. - * src_data: - An N-dimensional NumPy array. - * x_dim: - The X dimension within `src_data`. - * y_dim: - The Y dimension within `src_data`. - * weights_info: - The area weights information to be used for area-weighted - regridding. + For example : - Kwargs: + .. code:: - * mdtol: - Tolerance of missing data. The value returned in each element of the - returned array will be masked if the fraction of missing data exceeds - mdtol. This fraction is calculated based on the area of masked cells - within each target cell. 
mdtol=0 means no missing data is tolerated - while mdtol=1 will mean the resulting element will be masked if and - only if all the overlapping elements of the source grid are masked. - Defaults to 0. - - Returns: - The regridded data as an N-dimensional NumPy array. The lengths - of the X and Y dimensions will now match those of the target - grid. - - """ - ( - blank_weights, - src_area_weights, - new_data_mask_basis, - ) = weights_info - - ( - result_x_extent, - result_y_extent, - square_data_indices_y, - square_data_indices_x, - src_area_datas_required, - ) = index_info - - # Ensure we have x_dim and y_dim. - x_dim_orig = x_dim - y_dim_orig = y_dim - if y_dim is None: - src_data = np.expand_dims(src_data, axis=src_data.ndim) - y_dim = src_data.ndim - 1 - if x_dim is None: - src_data = np.expand_dims(src_data, axis=src_data.ndim) - x_dim = src_data.ndim - 1 - # Move y_dim and x_dim to last dimensions - if not x_dim == src_data.ndim - 1: - src_data = np.moveaxis(src_data, x_dim, -1) - if not y_dim == src_data.ndim - 2: - if x_dim < y_dim: - # note: y_dim was shifted along by one position when - # x_dim was moved to the last dimension - src_data = np.moveaxis(src_data, y_dim - 1, -2) - elif x_dim > y_dim: - src_data = np.moveaxis(src_data, y_dim, -2) - x_dim = src_data.ndim - 1 - y_dim = src_data.ndim - 2 - - # Create empty "pre-averaging" data array that will enable the - # src_data data coresponding to a given target grid point, - # to be stacked per point. - # Note that dtype is not preserved and that the array mask - # allows for regions that do not overlap. - new_shape = list(src_data.shape) - new_shape[x_dim] = result_x_extent - new_shape[y_dim] = result_y_extent - - # Use input cube dtype or convert values to the smallest possible float - # dtype when necessary. - dtype = np.promote_types(src_data.dtype, np.float16) - - # Axes of data over which the weighted mean is calculated. - axis = (y_dim, x_dim) - - # Use previously established indices - - src_area_datas_square = src_data[ - ..., square_data_indices_y, square_data_indices_x - ] - - _, src_area_datas_required = np.broadcast_arrays( - src_area_datas_square, src_area_datas_required - ) - - src_area_datas = np.where( - src_area_datas_required, src_area_datas_square, 0 - ) - - # Flag to indicate whether the original data was a masked array. - src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False - if src_masked: - src_area_masks_square = src_data.mask[ - ..., square_data_indices_y, square_data_indices_x - ] - src_area_masks = np.where( - src_area_datas_required, src_area_masks_square, True - ) - - else: - # If the weights were originally blank, set the weights to all 1 to - # avoid divide by 0 error and set the new data mask for making the - # values 0 - src_area_weights = np.where(blank_weights, 1, src_area_weights) - - new_data_mask = np.broadcast_to(new_data_mask_basis, new_shape) - - # Broadcast the weights array to allow numpy's ma.average - # to be called. - # Assign new shape to raise error on copy. - src_area_weights.shape = src_area_datas.shape[-3:] - # Broadcast weights to match shape of data. - _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights) - - # Mask the data points - if src_masked: - src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks) - - # Calculate weighted mean taking into account missing data. 
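The ``mdtol`` rule described in the docstring above, in isolation: a result element is masked when the weighted fraction of masked input exceeds the tolerance. A standalone sketch of that arithmetic using plain :func:`numpy.ma.average`, with invented values:

.. code::

    import numpy as np
    import numpy.ma as ma

    data = ma.masked_invalid([1.0, np.nan, 3.0, 4.0])
    weights = np.ones(4)

    mean, unmasked_wsum = ma.average(data, weights=weights, returned=True)
    frac_masked = 1.0 - unmasked_wsum / weights.sum()  # 0.25 here

    # mdtol=0 would mask this element; any mdtol >= 0.25 keeps the
    # weighted mean (8/3).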
- new_data = _weighted_mean_with_mdtol( - src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol - ) - new_data = new_data.reshape(new_shape) - if src_masked: - new_data_mask = new_data.mask - - # Mask the data if originally masked or if the result has masked points - if ma.isMaskedArray(src_data): - new_data = ma.array( - new_data, - mask=new_data_mask, - fill_value=src_data.fill_value, - dtype=dtype, - ) - elif new_data_mask.any(): - new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype) - else: - new_data = new_data.astype(dtype) - - # Restore data to original form - if x_dim_orig is None and y_dim_orig is None: - new_data = np.squeeze(new_data, axis=x_dim) - new_data = np.squeeze(new_data, axis=y_dim) - elif y_dim_orig is None: - new_data = np.squeeze(new_data, axis=y_dim) - new_data = np.moveaxis(new_data, -1, x_dim_orig) - elif x_dim_orig is None: - new_data = np.squeeze(new_data, axis=x_dim) - new_data = np.moveaxis(new_data, -1, y_dim_orig) - elif x_dim_orig < y_dim_orig: - # move the x_dim back first, so that the y_dim will - # then be moved to its original position - new_data = np.moveaxis(new_data, -1, x_dim_orig) - new_data = np.moveaxis(new_data, -1, y_dim_orig) - else: - # move the y_dim back first, so that the x_dim will - # then be moved to its original position - new_data = np.moveaxis(new_data, -2, y_dim_orig) - new_data = np.moveaxis(new_data, -1, x_dim_orig) - - return new_data - - -def regrid_area_weighted_rectilinear_src_and_grid( - src_cube, grid_cube, mdtol=0 -): - """ - Return a new cube with data values calculated using the area weighted - mean of data values from src_grid regridded onto the horizontal grid of - grid_cube. + result = src_cube.regrid(grid_cube, AreaWeighted()) This function requires that the horizontal grids of both cubes are rectilinear (i.e. expressed in terms of two orthogonal 1D coordinates) @@ -629,6 +114,15 @@ def regrid_area_weighted_rectilinear_src_and_grid( A new :class:`iris.cube.Cube` instance. """ + wmsg = ( + "The function " + "'iris.experimental.regrid." + "regrid_area_weighted_rectilinear_src_and_grid' " + "has been deprecated, and will be removed in a future release. " + "Please consult the docstring for details." + ) + warn_deprecated(wmsg) + regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare( src_cube, grid_cube ) @@ -638,460 +132,26 @@ def regrid_area_weighted_rectilinear_src_and_grid( return result -def _regrid_area_weighted_rectilinear_src_and_grid__prepare( - src_cube, grid_cube -): - """ - First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. - - Check inputs and calculate related info. The 'regrid info' returned - can be re-used over many 2d slices. - - """ - # Get the 1d monotonic (or scalar) src and grid coordinates. - src_x, src_y = _get_xy_coords(src_cube) - grid_x, grid_y = _get_xy_coords(grid_cube) - - # Condition 1: All x and y coordinates must have contiguous bounds to - # define areas. - if ( - not src_x.is_contiguous() - or not src_y.is_contiguous() - or not grid_x.is_contiguous() - or not grid_y.is_contiguous() - ): - raise ValueError( - "The horizontal grid coordinates of both the source " - "and grid cubes must have contiguous bounds." - ) - - # Condition 2: Everything must have the same coordinate system. - src_cs = src_x.coord_system - grid_cs = grid_x.coord_system - if src_cs != grid_cs: - raise ValueError( - "The horizontal grid coordinates of both the source " - "and grid cubes must have the same coordinate " - "system." 
- ) - - # Condition 3: cannot create vector coords from scalars. - src_x_dims = src_cube.coord_dims(src_x) - src_x_dim = None - if src_x_dims: - src_x_dim = src_x_dims[0] - src_y_dims = src_cube.coord_dims(src_y) - src_y_dim = None - if src_y_dims: - src_y_dim = src_y_dims[0] - if ( - src_x_dim is None - and grid_x.shape[0] != 1 - or src_y_dim is None - and grid_y.shape[0] != 1 - ): - raise ValueError( - "The horizontal grid coordinates of source cube " - "includes scalar coordinates, but the new grid does " - "not. The new grid must not require additional data " - "dimensions to be created." - ) - - # Determine whether to calculate flat or spherical areas. - # Don't only rely on coord system as it may be None. - spherical = ( - isinstance( - src_cs, - (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS), - ) - or src_x.units == "degrees" - or src_x.units == "radians" - ) - - # Get src and grid bounds in the same units. - x_units = cf_units.Unit("radians") if spherical else src_x.units - y_units = cf_units.Unit("radians") if spherical else src_y.units - - # Operate in highest precision. - src_dtype = np.promote_types(src_x.bounds.dtype, src_y.bounds.dtype) - grid_dtype = np.promote_types(grid_x.bounds.dtype, grid_y.bounds.dtype) - dtype = np.promote_types(src_dtype, grid_dtype) - - src_x_bounds = _get_bounds_in_units(src_x, x_units, dtype) - src_y_bounds = _get_bounds_in_units(src_y, y_units, dtype) - grid_x_bounds = _get_bounds_in_units(grid_x, x_units, dtype) - grid_y_bounds = _get_bounds_in_units(grid_y, y_units, dtype) - - # Create 2d meshgrids as required by _create_cube func. - meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points) - - # Determine whether target grid bounds are decreasing. This must - # be determined prior to wrap_lons being called. - grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0] - grid_y_decreasing = grid_y_bounds[-1, 0] < grid_y_bounds[0, 0] - - # Wrapping of longitudes. - if spherical: - base = np.min(src_x_bounds) - modulus = x_units.modulus - # Only wrap if necessary to avoid introducing floating - # point errors. - if np.min(grid_x_bounds) < base or np.max(grid_x_bounds) > ( - base + modulus - ): - grid_x_bounds = iris.analysis.cartography.wrap_lons( - grid_x_bounds, base, modulus - ) - - # Determine whether the src_x coord has periodic boundary conditions. - circular = getattr(src_x, "circular", False) - - # Use simple cartesian area function or one that takes into - # account the curved surface if coord system is spherical. - if spherical: - area_func = _spherical_area - else: - area_func = _cartesian_area - - def _calculate_regrid_area_weighted_weights( - src_x_bounds, - src_y_bounds, - grid_x_bounds, - grid_y_bounds, - grid_x_decreasing, - grid_y_decreasing, - area_func, - circular=False, - ): - """ - Compute the area weights used for area-weighted regridding. - Args: - * src_x_bounds: - A NumPy array of bounds along the X axis defining the source grid. - * src_y_bounds: - A NumPy array of bounds along the Y axis defining the source grid. - * grid_x_bounds: - A NumPy array of bounds along the X axis defining the new grid. - * grid_y_bounds: - A NumPy array of bounds along the Y axis defining the new grid. - * grid_x_decreasing: - Boolean indicating whether the X coordinate of the new grid is - in descending order. - * grid_y_decreasing: - Boolean indicating whether the Y coordinate of the new grid is - in descending order. 
- * area_func: - A function that returns an (p, q) array of weights given an (p, 2) - shaped array of Y bounds and an (q, 2) shaped array of X bounds. - Kwargs: - * circular: - A boolean indicating whether the `src_x_bounds` are periodic. - Default is False. - Returns: - The area weights to be used for area-weighted regridding. - """ - # Determine which grid bounds are within src extent. - y_within_bounds = _within_bounds( - src_y_bounds, grid_y_bounds, grid_y_decreasing - ) - x_within_bounds = _within_bounds( - src_x_bounds, grid_x_bounds, grid_x_decreasing - ) - - # Cache which src_bounds are within grid bounds - cached_x_bounds = [] - cached_x_indices = [] - max_x_indices = 0 - for (x_0, x_1) in grid_x_bounds: - if grid_x_decreasing: - x_0, x_1 = x_1, x_0 - x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) - cached_x_bounds.append(x_bounds) - cached_x_indices.append(x_indices) - # Keep record of the largest slice - if isinstance(x_indices, slice): - x_indices_size = np.sum(x_indices.stop - x_indices.start) - else: # is tuple of indices - x_indices_size = len(x_indices) - if x_indices_size > max_x_indices: - max_x_indices = x_indices_size - - # Cache which y src_bounds areas and weights are within grid bounds - cached_y_indices = [] - cached_weights = [] - max_y_indices = 0 - for j, (y_0, y_1) in enumerate(grid_y_bounds): - # Reverse lower and upper if dest grid is decreasing. - if grid_y_decreasing: - y_0, y_1 = y_1, y_0 - y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1) - cached_y_indices.append(y_indices) - # Keep record of the largest slice - if isinstance(y_indices, slice): - y_indices_size = np.sum(y_indices.stop - y_indices.start) - else: # is tuple of indices - y_indices_size = len(y_indices) - if y_indices_size > max_y_indices: - max_y_indices = y_indices_size - - weights_i = [] - for i, (x_0, x_1) in enumerate(grid_x_bounds): - # Reverse lower and upper if dest grid is decreasing. - if grid_x_decreasing: - x_0, x_1 = x_1, x_0 - x_bounds = cached_x_bounds[i] - x_indices = cached_x_indices[i] - - # Determine whether element i, j overlaps with src and hence - # an area weight should be computed. - # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case - # of wrapped longitudes. However if the src grid is not global - # (i.e. circular) this new cell would include a region outside of - # the extent of the src grid and thus the weight is therefore - # invalid. - outside_extent = x_0 > x_1 and not circular - if ( - outside_extent - or not y_within_bounds[j] - or not x_within_bounds[i] - ): - weights = False - else: - # Calculate weights based on areas of cropped bounds. - if isinstance(x_indices, tuple) and isinstance( - y_indices, tuple - ): - raise RuntimeError( - "Cannot handle split bounds " "in both x and y." 
- ) - weights = area_func(y_bounds, x_bounds) - weights_i.append(weights) - cached_weights.append(weights_i) - return ( - tuple(cached_x_indices), - tuple(cached_y_indices), - max_x_indices, - max_y_indices, - tuple(cached_weights), - ) - - ( - cached_x_indices, - cached_y_indices, - max_x_indices, - max_y_indices, - cached_weights, - ) = _calculate_regrid_area_weighted_weights( - src_x_bounds, - src_y_bounds, - grid_x_bounds, - grid_y_bounds, - grid_x_decreasing, - grid_y_decreasing, - area_func, - circular, - ) - - # Go further, calculating the full weights array that we'll need in the - # perform step and the indices we'll need to extract from the cube we're - # regridding (src_data) - - result_y_extent = len(grid_y_bounds) - result_x_extent = len(grid_x_bounds) - - # Total number of points - num_target_pts = result_y_extent * result_x_extent - - # Create empty array to hold weights - src_area_weights = np.zeros( - list((max_y_indices, max_x_indices, num_target_pts)) - ) - - # Built for the case where the source cube isn't masked - blank_weights = np.zeros((num_target_pts,)) - new_data_mask_basis = np.full( - (len(cached_y_indices), len(cached_x_indices)), False, dtype=np.bool_ - ) - - # To permit fancy indexing, we need to store our data in an array whose - # first two dimensions represent the indices needed for the target cell. - # Since target cells can require a different number of indices, the size of - # these dimensions should be the maximum of this number. - # This means we need to track whether the data in - # that array is actually required and build those squared-off arrays - # TODO: Consider if a proper mask would be better - src_area_datas_required = np.full( - (max_y_indices, max_x_indices, num_target_pts), False - ) - square_data_indices_y = np.zeros( - (max_y_indices, max_x_indices, num_target_pts), dtype=int - ) - square_data_indices_x = np.zeros( - (max_y_indices, max_x_indices, num_target_pts), dtype=int - ) - - # Stack the weights for each target point and build the indices we'll need - # to extract the src_area_data - target_pt_ji = -1 - for j, y_indices in enumerate(cached_y_indices): - for i, x_indices in enumerate(cached_x_indices): - target_pt_ji += 1 - # Determine whether to mask element i, j based on whether - # there are valid weights. - weights = cached_weights[j][i] - if weights is False: - # Prepare for the src_data not being masked by storing the - # information that will let us fill the data with zeros and - # weights as one. The weighted average result will be the same, - # but we avoid dividing by zero. 
- blank_weights[target_pt_ji] = True - new_data_mask_basis[j, i] = True - else: - # Establish which indices are actually in y_indices and x_indices - if isinstance(y_indices, slice): - y_indices = list( - range( - y_indices.start, - y_indices.stop, - y_indices.step or 1, - ) - ) - else: - y_indices = list(y_indices) - - if isinstance(x_indices, slice): - x_indices = list( - range( - x_indices.start, - x_indices.stop, - x_indices.step or 1, - ) - ) - else: - x_indices = list(x_indices) - - # For the weights, we just need the lengths of these as we're - # dropping them into a pre-made array - - len_y = len(y_indices) - len_x = len(x_indices) - - src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights - - # To build the indices for the source cube, we need equal - # shaped array so we pad with 0s and record the need to mask - # them in src_area_datas_required - padded_y_indices = y_indices + [0] * (max_y_indices - len_y) - padded_x_indices = x_indices + [0] * (max_x_indices - len_x) - - square_data_indices_y[..., target_pt_ji] = np.array( - padded_y_indices - )[:, np.newaxis] - square_data_indices_x[..., target_pt_ji] = padded_x_indices - - src_area_datas_required[0:len_y, 0:len_x, target_pt_ji] = True - - # Package up the return data - - weights_info = ( - blank_weights, - src_area_weights, - new_data_mask_basis, - ) - - index_info = ( - result_x_extent, - result_y_extent, - square_data_indices_y, - square_data_indices_x, - src_area_datas_required, - ) - - # Now return it - - return ( - src_x, - src_y, - src_x_dim, - src_y_dim, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, - weights_info, - index_info, - ) - - -def _regrid_area_weighted_rectilinear_src_and_grid__perform( - src_cube, regrid_info, mdtol -): - """ - Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'. - - Perform the prepared regrid calculation on a single 2d cube. +def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): + r""" + Return a new cube with the data values calculated using the weighted + mean of data values from :data:`src_cube` and the weights from + :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. - """ - ( - src_x, - src_y, - src_x_dim, - src_y_dim, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, - weights_info, - index_info, - ) = regrid_info - - # Calculate new data array for regridded cube. - regrid = functools.partial( - _regrid_area_weighted_array, - x_dim=src_x_dim, - y_dim=src_y_dim, - weights_info=weights_info, - index_info=index_info, - mdtol=mdtol, - ) + .. note :: - new_data = map_complete_blocks( - src_cube, regrid, (src_y_dim, src_x_dim), meshgrid_x.shape - ) + .. deprecated:: 3.2.0 - # Wrap up the data as a Cube. - regrid_callback = RectilinearRegridder._regrid - new_cube = RectilinearRegridder._create_cube( - new_data, - src_cube, - src_x_dim, - src_y_dim, - src_x, - src_y, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, - regrid_callback, - ) + This function is scheduled to be removed in a future release. + Please use :meth:`iris.cube.Cube.regrid` with the + :class:`iris.analysis.PointInCell` scheme instead : this is an exact + replacement. - # Slice out any length 1 dimensions. - indices = [slice(None, None)] * new_data.ndim - if src_x_dim is not None and new_cube.shape[src_x_dim] == 1: - indices[src_x_dim] = 0 - if src_y_dim is not None and new_cube.shape[src_y_dim] == 1: - indices[src_y_dim] = 0 - if 0 in indices: - new_cube = new_cube[tuple(indices)] + For example : - return new_cube + .. 
code::
-
-def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube):
-    r"""
-    Return a new cube with the data values calculated using the weighted
-    mean of data values from :data:`src_cube` and the weights from
-    :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`.
+                result = src_cube.regrid(grid_cube, PointInCell())

     This function requires that the :data:`src_cube` has a horizontal grid
     defined by a pair of X- and Y-axis coordinates which are mapped over the
     same cube dimensions, thus each point has an individually defined X and
     Y coordinate value. The actual dimensions of these coordinates are of no
     significance.
     The :data:`src_cube` grid cube must have a normal horizontal grid, i.e.
     expressed in terms of two orthogonal 1D horizontal coordinates. Both
     grids must be in the same coordinate system, and the :data:`grid_cube`
     must have horizontal coordinates that are both bounded and contiguous.

     Note that, for any given target :data:`grid_cube` cell, only the points
     from the :data:`src_cube` that are bound by that cell will contribute to
     the cell result. The bounded extent of the :data:`src_cube` will not be
     considered here.

     A target :data:`grid_cube` cell result will be calculated as,
     :math:`\sum (src\_cube.data_{ij} * weights_{ij}) / \sum weights_{ij}`,
     for all :math:`ij` src_cube points that are bound by that cell.

     .. warning::

         * All coordinates that span the :data:`src_cube` that don't define
           the horizontal curvilinear grid will be ignored.

     Args:

     * src_cube:
         A :class:`iris.cube.Cube` instance that defines the source
         variable grid to be regridded.
     * weights (array or None):
         A :class:`numpy.ndarray` instance that defines the weights
         for the source variable grid cells. Values greater than zero
         define a valid source variable grid cell that contributes
         to the target :data:`grid_cube` cell. All other values are
         ignored and invalidate the associated source variable grid
         cell. A value of None means that the weights will be uniform.
     * grid_cube:
         A :class:`iris.cube.Cube` instance that defines the target
         rectilinear grid.

     Returns:
         A :class:`iris.cube.Cube` instance.

     """
@@ -1134,6 +194,14 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube):
         A :class:`iris.cube.Cube` instance.

     """
+    wmsg = (
+        "The function "
+        "'iris.experimental.regrid."
+        "regrid_weighted_curvilinear_to_rectilinear' "
+        "has been deprecated, and will be removed in a future release. "
+        "Please consult the docstring for details."
+    )
+    warn_deprecated(wmsg)
     regrid_info = _regrid_weighted_curvilinear_to_rectilinear__prepare(
         src_cube, weights, grid_cube
     )
@@ -1572,6 +640,16 @@ def __init__(self, projection=None):
         Linear regridding scheme that uses scipy.interpolate.griddata on
         projected unstructured data.

+        .. note::
+
+            .. deprecated:: 3.2.0
+
+                This class is scheduled to be removed in a future release, and no
+                replacement is currently planned.
+                If you make use of this functionality, please contact the Iris
+                Developers to discuss how to retain it (which could include
+                reversing the deprecation).
+
         Optional Args:

         * projection: `cartopy.crs instance`
@@ -1581,6 +659,12 @@ def __init__(self, projection=None):

         """
         self.projection = projection
+        wmsg = (
+            "The class iris.experimental.regrid.ProjectedUnstructuredLinear "
+            "has been deprecated, and will be removed in a future release. "
+            "Please consult the docstring for details."
+        )
+        warn_deprecated(wmsg)

     def regridder(self, src_cube, target_grid):
         """
@@ -1639,6 +723,17 @@ def __init__(self, projection=None):
         Nearest regridding scheme that uses scipy.interpolate.griddata on
         projected unstructured data.

+        .. note::
+
+            .. deprecated:: 3.2.0
+
+                This class is scheduled to be removed in a future release, and no
+                exact replacement is currently planned.
+                Please use :class:`iris.analysis.UnstructuredNearest` instead, if
+                possible. If you have a need for this exact functionality, please
+                contact the Iris Developers to discuss how to retain it (which
+                could include reversing the deprecation).
+
         Optional Args:

         * projection: `cartopy.crs instance`
@@ -1648,6 +743,13 @@ def __init__(self, projection=None):

         """
         self.projection = projection
+        wmsg = (
+            "iris.experimental.regrid.ProjectedUnstructuredNearest has been "
+            "deprecated, and will be removed in a future release. "
+            "Please use 'iris.analysis.UnstructuredNearest' instead, where "
+            "possible. Consult the docstring for details."
+        )
+        warn_deprecated(wmsg)

     def regridder(self, src_cube, target_grid):
         """
diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py
index 421bd86c935..bfa048ddf0d 100644
--- a/lib/iris/experimental/regrid_conservative.py
+++ b/lib/iris/experimental/regrid_conservative.py
@@ -6,16 +6,35 @@
 """
 Support for conservative regridding via ESMPy.

+.. note::
+
+    .. deprecated:: 3.2.0
+
+        This package will be removed in a future release.
+        Please use
+        `iris-esmf-regrid <https://github.com/SciTools-incubator/iris-esmf-regrid>`_
+        instead.
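One nuance in the ``PointInCell`` migration shown earlier in this file's hunks: the deprecated function took ``weights`` as a separate argument, whereas the :class:`iris.analysis.PointInCell` scheme accepts them through its constructor. A sketch of the fully equivalent call, with ``src_cube``, ``weights`` and ``grid_cube`` as in the old signature:

.. code::

    from iris.analysis import PointInCell

    # Previously: regrid_weighted_curvilinear_to_rectilinear(
    #                 src_cube, weights, grid_cube)
    result = src_cube.regrid(grid_cube, PointInCell(weights=weights))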
+
 """

 import cartopy.crs as ccrs
 import numpy as np

 import iris
+from iris._deprecation import warn_deprecated
 from iris.analysis._interpolation import get_xy_dim_coords
 from iris.analysis._regrid import RectilinearRegridder
 from iris.util import _meshgrid

+wmsg = (
+    "The 'iris.experimental.regrid_conservative' package is deprecated since "
+    "version 3.2, and will be removed in a future release. Please use "
+    "iris-esmf-regrid instead. "
+    "See https://github.com/SciTools-incubator/iris-esmf-regrid."
+)
+warn_deprecated(wmsg)
+
+
 #: A static Cartopy Geodetic() instance for transforming to true-lat-lons.
 _CRS_TRUELATLON = ccrs.Geodetic()
@@ -131,6 +150,22 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube):
     """
     Perform a conservative regridding with ESMPy.

+    .. note ::
+
+        .. deprecated:: 3.2.0
+
+            This function is scheduled to be removed in a future release.
+            Please use
+            `iris-esmf-regrid <https://github.com/SciTools-incubator/iris-esmf-regrid>`_
+            instead.
+
+            For example :
+
+            .. code::
+
+                from esmf_regrid.schemes import ESMFAreaWeighted
+                result = src_cube.regrid(grid_cube, ESMFAreaWeighted())
+
     Regrids the data of a source cube onto a new grid defined by a destination
     cube.
@@ -169,6 +204,15 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube):
     To alter this, make a prior call to ESMF.Manager().

     """
+    wmsg = (
+        "The function "
+        "'iris.experimental.regrid_conservative."
+        "regrid_conservative_via_esmpy' "
+        "has been deprecated, and will be removed in a future release. "
+        "Please consult the docstring for details."
+    )
+    warn_deprecated(wmsg)
+
     # Lazy import so we can build the docs with no ESMF.
     import ESMF

diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py
index 5e42f2e6060..6c802e00d46 100644
--- a/lib/iris/experimental/ugrid/load.py
+++ b/lib/iris/experimental/ugrid/load.py
@@ -280,7 +280,7 @@ def _build_aux_coord(coord_var, file_path):
     return coord, axis

-def _build_connectivity(connectivity_var, file_path, location_dims):
+def _build_connectivity(connectivity_var, file_path, element_dims):
     """
     Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a
     given :class:`~iris.experimental.ugrid.cf.CFUGridConnectivityVariable`,
@@ -301,10 +301,10 @@ def _build_connectivity(connectivity_var, file_path, location_dims):
     dim_names = connectivity_var.dimensions
     # Connectivity arrays must have two dimensions.
     assert len(dim_names) == 2
-    if dim_names[1] in location_dims:
-        src_dim = 1
+    if dim_names[1] in element_dims:
+        location_axis = 1
     else:
-        src_dim = 0
+        location_axis = 0

     standard_name, long_name, var_name = get_names(
         connectivity_var, None, attributes
@@ -319,7 +319,7 @@ def _build_connectivity(connectivity_var, file_path, location_dims):
         units=attr_units,
         attributes=attributes,
         start_index=start_index,
-        src_dim=src_dim,
+        location_axis=location_axis,
     )

     return connectivity, dim_names[0]
@@ -423,20 +423,20 @@ def _build_mesh(cf, mesh_var, file_path):
         raise ValueError(message)

     # Used for detecting transposed connectivities. 
- location_dims = (edge_dimension, face_dimension) + element_dims = (edge_dimension, face_dimension) connectivity_args = [] for connectivity_var in mesh_var.cf_group.connectivities.values(): connectivity, first_dim_name = _build_connectivity( - connectivity_var, file_path, location_dims + connectivity_var, file_path, element_dims ) assert connectivity.var_name == getattr(mesh_var, connectivity.cf_role) connectivity_args.append(connectivity) # If the mesh_var has not supplied the dimension name, it is safe to # fall back on the connectivity's first dimension's name. - if edge_dimension is None and connectivity.src_location == "edge": + if edge_dimension is None and connectivity.location == "edge": edge_dimension = first_dim_name - if face_dimension is None and connectivity.src_location == "face": + if face_dimension is None and connectivity.location == "face": face_dimension = first_dim_name standard_name, long_name, var_name = get_names(mesh_var, None, attributes) @@ -480,12 +480,12 @@ def _build_mesh_coords(mesh, cf_var): """ # TODO: integrate with standard saving API when no longer 'experimental'. # Identify the cube's mesh dimension, for attaching MeshCoords. - locations_dimensions = { + element_dimensions = { "node": mesh.node_dimension, "edge": mesh.edge_dimension, "face": mesh.face_dimension, } - mesh_dim_name = locations_dimensions[cf_var.location] + mesh_dim_name = element_dimensions[cf_var.location] # (Only expecting 1 mesh dimension per cf_var). mesh_dim = cf_var.dimensions.index(mesh_dim_name) diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 0f2bfd844cb..974a5630463 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -12,9 +12,10 @@ """ from abc import ABC, abstractmethod from collections import namedtuple -import re +from collections.abc import Container from typing import Iterable +from cf_units import Unit from dask import array as da import numpy as np @@ -24,10 +25,11 @@ metadata_filter, metadata_manager_factory, ) +from ...common.metadata import BaseMetadata from ...config import get_logger from ...coords import AuxCoord, _DimensionalMetadata from ...exceptions import ConnectivityNotFoundError, CoordinateNotFoundError -from ...util import array_equal, guess_coord_axis +from ...util import array_equal, clip_string, guess_coord_axis from .metadata import ConnectivityMetadata, MeshCoordMetadata, MeshMetadata # Configure the logger. @@ -92,8 +94,8 @@ class Connectivity(_DimensionalMetadata): """ A CF-UGRID topology connectivity, describing the topological relationship - between two lists of dimensional locations. One or more connectivities - make up a CF-UGRID topology - a constituent of a CF-UGRID mesh. + between two types of mesh element. One or more connectivities make up a + CF-UGRID topology - a constituent of a CF-UGRID mesh. See: https://ugrid-conventions.github.io/ugrid-conventions @@ -122,7 +124,7 @@ def __init__( units=None, attributes=None, start_index=0, - src_dim=0, + location_axis=0, ): """ Constructs a single connectivity. @@ -130,17 +132,23 @@ def __init__( Args: * indices (numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array): - The index values describing a topological relationship. Constructed - of 2 dimensions - the list of locations, and within each location: - the indices of the 'target locations' it relates to. - Use a :class:`numpy.ma.core.MaskedArray` if :attr:`src_location` - lengths vary - mask unused index 'slots' within each - :attr:`src_location`. 
Use a :class:`dask.array.Array` to keep - indices 'lazy'. + 2D array giving the topological connection relationship between + :attr:`location` elements and :attr:`connected` elements. + The :attr:`location_axis` dimension indexes over the + :attr:`location` dimension of the mesh - i.e. its length matches + the total number of :attr:`location` elements in the mesh. The + :attr:`connected_axis` dimension can be any length, corresponding + to the highest number of :attr:`connected` elements connected to a + :attr:`location` element. The array values are indices into the + :attr:`connected` dimension of the mesh. If the number of + :attr:`connected` elements varies between :attr:`location` + elements: use a :class:`numpy.ma.core.MaskedArray` and mask the + :attr:`location` elements' unused index 'slots'. Use a + :class:`dask.array.Array` to keep indices 'lazy'. * cf_role (str): Denotes the topological relationship that this connectivity - describes. Made up of this array's locations, and the indexed - 'target location' within each location. + describes. Made up of this array's :attr:`location`, and the + :attr:`connected` element type that is indexed by the array. See :attr:`UGRID_CF_ROLES` for valid arguments. Kwargs: @@ -164,14 +172,14 @@ def __init__( Either ``0`` or ``1``. Default is ``0``. Denotes whether :attr:`indices` uses 0-based or 1-based indexing (allows support for Fortran and legacy NetCDF files). - * src_dim (int): - Either ``0`` or ``1``. Default is ``0``. Denotes which dimension - of :attr:`indices` varies over the :attr:`src_location`\\ s (the - alternate dimension therefore varying within individual - :attr:`src_location`\\ s). (This parameter allows support for fastest varying index being + * location_axis (int): + Either ``0`` or ``1``. Default is ``0``. Denotes which axis + of :attr:`indices` varies over the :attr:`location` elements (the + alternate axis therefore varying over :attr:`connected` elements). + (This parameter allows support for fastest varying index being either first or last). E.g. for ``face_node_connectivity``, for 10 faces: - ``indices.shape[src_dim] = 10``. + ``indices.shape[location_axis] == 10``. """ @@ -188,15 +196,15 @@ def validate_arg_vs_list(arg_name, arg, valid_list): validate_arg_vs_list("start_index", start_index, [0, 1]) # indices array will be 2-dimensional, so must be either 0 or 1. 
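A hypothetical sketch of the indexing convention documented above (illustrative values only, not part of this changeset):

.. code::

    import numpy as np

    from iris.experimental.ugrid.mesh import Connectivity

    # Three quads plus one triangle; the triangle masks its unused fourth
    # index 'slot'.
    indices = np.ma.masked_equal(
        [[0, 1, 2, 3], [1, 4, 5, 2], [4, 6, 7, 5], [2, 5, 7, -1]], -1
    )
    conn = Connectivity(indices, cf_role="face_node_connectivity")
    print(conn.location, conn.connected)  # face node
    print(conn.location_lengths())  # [4 4 4 3]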
- validate_arg_vs_list("src_dim", src_dim, [0, 1]) + validate_arg_vs_list("location_axis", location_axis, [0, 1]) validate_arg_vs_list("cf_role", cf_role, Connectivity.UGRID_CF_ROLES) self._metadata_manager.start_index = start_index - self._metadata_manager.src_dim = src_dim + self._metadata_manager.location_axis = location_axis self._metadata_manager.cf_role = cf_role - self._tgt_dim = 1 - src_dim - self._src_location, self._tgt_location = cf_role.split("_")[:2] + self._connected_axis = 1 - location_axis + self._location, self._connected = cf_role.split("_")[:2] super().__init__( values=indices, @@ -207,58 +215,6 @@ def validate_arg_vs_list(arg_name, arg, valid_list): attributes=attributes, ) - def __repr__(self): - def kwargs_filter(k, v): - result = False - if k != "cf_role": - if v is not None: - result = True - if ( - not isinstance(v, str) - and isinstance(v, Iterable) - and not v - ): - result = False - elif k == "units" and v == "unknown": - result = False - return result - - def array2repr(array): - if self.has_lazy_indices(): - result = repr(array) - else: - with np.printoptions( - threshold=NP_PRINTOPTIONS_THRESHOLD, - edgeitems=NP_PRINTOPTIONS_EDGEITEMS, - ): - result = re.sub("\n *", " ", repr(array)) - return result - - # positional arguments - args = ", ".join( - [ - f"{array2repr(self.core_indices())}", - f"cf_role={self.cf_role!r}", - ] - ) - - # optional arguments (metadata) - kwargs = ", ".join( - [ - f"{k}={v!r}" - for k, v in self.metadata._asdict().items() - if kwargs_filter(k, v) - ] - ) - - return f"{self.__class__.__name__}({', '.join([args, kwargs])})" - - def __str__(self): - args = ", ".join( - [f"cf_role={self.cf_role!r}", f"start_index={self.start_index!r}"] - ) - return f"{self.__class__.__name__}({args})" - @property def _values(self): # Overridden just to allow .setter override. @@ -283,25 +239,25 @@ def cf_role(self): return self._metadata_manager.cf_role @property - def src_location(self): + def location(self): """ Derived from the connectivity's :attr:`cf_role` - the first part, e.g. - ``face`` in ``face_node_connectivity``. Refers to the locations - listed by the :attr:`src_dim` of the connectivity's :attr:`indices` - array. + ``face`` in ``face_node_connectivity``. Refers to the elements that + vary along the :attr:`location_axis` of the connectivity's + :attr:`indices` array. """ - return self._src_location + return self._location @property - def tgt_location(self): + def connected(self): """ Derived from the connectivity's :attr:`cf_role` - the second part, e.g. - ``node`` in ``face_node_connectivity``. Refers to the locations indexed + ``node`` in ``face_node_connectivity``. Refers to the elements indexed by the values in the connectivity's :attr:`indices` array. """ - return self._tgt_location + return self._connected @property def start_index(self): @@ -316,47 +272,48 @@ def start_index(self): return self._metadata_manager.start_index @property - def src_dim(self): + def location_axis(self): """ - The dimension of the connectivity's :attr:`indices` array that varies - over the connectivity's :attr:`src_location`\\ s. Either ``0`` or ``1``. + The axis of the connectivity's :attr:`indices` array that varies + over the connectivity's :attr:`location` elements. Either ``0`` or ``1``. **Read-only** - validity of :attr:`indices` is dependent on - :attr:`src_dim`. Use :meth:`transpose` to create a new, transposed - :class:`Connectivity` if a different :attr:`src_dim` is needed. + :attr:`location_axis`. 
Use :meth:`transpose` to create a new, transposed + :class:`Connectivity` if a different :attr:`location_axis` is needed. """ - return self._metadata_manager.src_dim + return self._metadata_manager.location_axis @property - def tgt_dim(self): + def connected_axis(self): """ - Derived as the alternate value of :attr:`src_dim` - each must equal - either ``0`` or ``1``. - The dimension of the connectivity's :attr:`indices` array that varies - within the connectivity's individual :attr:`src_location`\\ s. + Derived as the alternate value of :attr:`location_axis` - each must + equal either ``0`` or ``1``. The axis of the connectivity's + :attr:`indices` array that varies over the :attr:`connected` elements + associated with each :attr:`location` element. """ - return self._tgt_dim + return self._connected_axis @property def indices(self): """ The index values describing the topological relationship of the connectivity, as a NumPy array. Masked points indicate a - :attr:`src_location` shorter than the longest :attr:`src_location` - described in this array - unused index 'slots' are masked. + :attr:`location` element with fewer :attr:`connected` elements than + other :attr:`location` elements described in this array - unused index + 'slots' are masked. **Read-only** - index values are only meaningful when combined with an appropriate :attr:`cf_role`, :attr:`start_index` and - :attr:`src_dim`. A new :class:`Connectivity` must therefore be + :attr:`location_axis`. A new :class:`Connectivity` must therefore be defined if different indices are needed. """ return self._values - def indices_by_src(self, indices=None): + def indices_by_location(self, indices=None): """ - Return a view of the indices array with :attr:`src_dim` **always** as - the first index - transposed if necessary. Can optionally pass in an + Return a view of the indices array with :attr:`location_axis` **always** as + the first axis - transposed if necessary. Can optionally pass in an identically shaped array on which to perform this operation (e.g. the output from :meth:`core_indices` or :meth:`lazy_indices`). @@ -368,7 +325,7 @@ def indices_by_src(self, indices=None): Returns: A view of the indices array, transposed - if necessary - to put - :attr:`src_dim` first. + :attr:`location_axis` first. """ if indices is None: @@ -380,20 +337,20 @@ def indices_by_src(self, indices=None): f"got shape={indices.shape} ." ) - if self.src_dim == 0: + if self.location_axis == 0: result = indices - elif self.src_dim == 1: + elif self.location_axis == 1: result = indices.transpose() else: - raise ValueError("Invalid src_dim.") + raise ValueError("Invalid location_axis.") return result def _validate_indices(self, indices, shapes_only=False): # Use shapes_only=True for a lower resource, less thorough validation # of indices by just inspecting the array shape instead of inspecting - # individual masks. So will not catch individual src_locations being - # unacceptably small. + # individual masks. So will not catch individual location elements + # having unacceptably low numbers of associated connected elements. def indices_error(message): raise ValueError("Invalid indices provided. " + message) @@ -422,43 +379,43 @@ def indices_error(message): len_req_fail = False if shapes_only: - src_shape = indices_shape[self.tgt_dim] + location_shape = indices_shape[self.connected_axis] # Wrap as lazy to allow use of the same operations below # regardless of shapes_only. 
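Another illustrative sketch (not part of this changeset): a connectivity stored with the 'alternate' dimension order, and how ``indices_by_location`` and ``transpose`` normalise it:

.. code::

    import numpy as np

    from iris.experimental.ugrid.mesh import Connectivity

    # Axis 1 varies over the three edges; axis 0 over each edge's two nodes.
    conn = Connectivity(
        np.array([[0, 1, 2], [1, 2, 3]]),
        cf_role="edge_node_connectivity",
        location_axis=1,
    )
    print(conn.indices_by_location().shape)  # (3, 2)
    flipped = conn.transpose()
    print(flipped.location_axis, flipped.shape)  # 0 (3, 2)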
- src_lengths = _lazy.as_lazy_data(np.asarray(src_shape)) + location_lengths = _lazy.as_lazy_data(np.asarray(location_shape)) else: # Wouldn't be safe to use during __init__ validation, since - # lazy_src_lengths requires self.indices to exist. Safe here since + # lazy_location_lengths requires self.indices to exist. Safe here since # shapes_only==False is only called manually, i.e. after # initialisation. - src_lengths = self.lazy_src_lengths() - if self.src_location in ("edge", "boundary"): - if (src_lengths != 2).any().compute(): + location_lengths = self.lazy_location_lengths() + if self.location in ("edge", "boundary"): + if (location_lengths != 2).any().compute(): len_req_fail = "len=2" else: - if self.src_location == "face": + if self.location == "face": min_size = 3 - elif self.src_location == "volume": - if self.tgt_location == "edge": + elif self.location == "volume": + if self.connected == "edge": min_size = 6 else: min_size = 4 else: raise NotImplementedError - if (src_lengths < min_size).any().compute(): + if (location_lengths < min_size).any().compute(): len_req_fail = f"len>={min_size}" if len_req_fail: indices_error( - f"Not all src_locations meet requirement: {len_req_fail} - " + f"Not all {self.location}s meet requirement: {len_req_fail} - " f"needed to describe '{self.cf_role}' ." ) def validate_indices(self): """ Perform a thorough validity check of this connectivity's - :attr:`indices`. Includes checking the sizes of individual - :attr:`src_location`\\ s (specified using masks on the - :attr:`indices` array) against the :attr:`cf_role`. + :attr:`indices`. Includes checking the number of :attr:`connected` + elements associated with each :attr:`location` element (specified using + masks on the :attr:`indices` array) against the :attr:`cf_role`. Raises a ``ValueError`` if any problems are encountered, otherwise passes silently. @@ -476,8 +433,8 @@ def __eq__(self, other): if isinstance(other, Connectivity): # Account for the fact that other could be the transposed equivalent # of self, which we consider 'safe' since the recommended - # interaction with the indices array is via indices_by_src, which - # corrects for this difference. (To enable this, src_dim does + # interaction with the indices array is via indices_by_location, which + # corrects for this difference. (To enable this, location_axis does # not participate in ConnectivityMetadata to ConnectivityMetadata # equivalence). if hasattr(other, "metadata"): @@ -486,22 +443,22 @@ def __eq__(self, other): if eq: eq = ( self.shape == other.shape - and self.src_dim == other.src_dim + and self.location_axis == other.location_axis ) or ( self.shape == other.shape[::-1] - and self.src_dim == other.tgt_dim + and self.location_axis == other.connected_axis ) if eq: eq = array_equal( - self.indices_by_src(self.core_indices()), - other.indices_by_src(other.core_indices()), + self.indices_by_location(self.core_indices()), + other.indices_by_location(other.core_indices()), ) return eq def transpose(self): """ Create a new :class:`Connectivity`, identical to this one but with the - :attr:`indices` array transposed and the :attr:`src_dim` value flipped. + :attr:`indices` array transposed and the :attr:`location_axis` value flipped. 
Returns: A new :class:`Connectivity` that is the transposed equivalent of @@ -517,7 +474,7 @@ def transpose(self): units=self.units, attributes=self.attributes, start_index=self.start_index, - src_dim=self.tgt_dim, + location_axis=self.connected_axis, ) return new_connectivity @@ -560,11 +517,11 @@ def has_lazy_indices(self): """ return super()._has_lazy_values() - def lazy_src_lengths(self): + def lazy_location_lengths(self): """ - Return a lazy array representing the lengths of each - :attr:`src_location` in the :attr:`src_dim` of the connectivity's - :attr:`indices` array, accounting for masks if present. + Return a lazy array representing the number of :attr:`connected` + elements associated with each of the connectivity's :attr:`location` + elements, accounting for masks if present. Accessing this method will never cause the :attr:`indices` values to be loaded. Similarly, calling methods on, or indexing, the returned Array @@ -574,26 +531,28 @@ def lazy_src_lengths(self): :attr:`indices` have already been loaded. Returns: - A lazy array, representing the lengths of each :attr:`src_location`. + A lazy array, representing the number of :attr:`connected` + elements associated with each :attr:`location` element. """ - src_mask_counts = da.sum( - da.ma.getmaskarray(self.indices), axis=self.tgt_dim + location_mask_counts = da.sum( + da.ma.getmaskarray(self.indices), axis=self.connected_axis ) - max_src_size = self.indices.shape[self.tgt_dim] - return max_src_size - src_mask_counts + max_location_size = self.indices.shape[self.connected_axis] + return max_location_size - location_mask_counts - def src_lengths(self): + def location_lengths(self): """ - Return a NumPy array representing the lengths of each - :attr:`src_location` in the :attr:`src_dim` of the connectivity's - :attr:`indices` array, accounting for masks if present. + Return a NumPy array representing the number of :attr:`connected` + elements associated with each of the connectivity's :attr:`location` + elements, accounting for masks if present. Returns: - A NumPy array, representing the lengths of each :attr:`src_location`. + A NumPy array, representing the number of :attr:`connected` + elements associated with each :attr:`location` element. """ - return self.lazy_src_lengths().compute() + return self.lazy_location_lengths().compute() def cube_dims(self, cube): """Not available on :class:`Connectivity`.""" @@ -606,7 +565,7 @@ def xml_element(self, doc): element.setAttribute("cf_role", self.cf_role) element.setAttribute("start_index", self.start_index) - element.setAttribute("src_dim", self.src_dim) + element.setAttribute("location_axis", self.location_axis) return element @@ -632,8 +591,8 @@ class Mesh(CFVariableMixin): AXES = ("x", "y") #: Valid range of values for ``topology_dimension``. TOPOLOGY_DIMENSIONS = (1, 2) - #: Valid mesh locations. - LOCATIONS = ("edge", "node", "face") + #: Valid mesh elements. + ELEMENTS = ("edge", "node", "face") def __init__( self, @@ -684,12 +643,12 @@ def __init__( self.attributes = attributes # based on the topology_dimension, create the appropriate coordinate manager - def normalise(location, axis): + def normalise(element, axis): result = str(axis).lower() if result not in self.AXES: - emsg = f"Invalid axis specified for {location} coordinate {coord.name()!r}, got {axis!r}." + emsg = f"Invalid axis specified for {element} coordinate {coord.name()!r}, got {axis!r}." 
                raise ValueError(emsg)
-            return f"{location}_{result}"
+            return f"{element}_{result}"
 
         if not isinstance(node_coords_and_axes, Iterable):
             node_coords_and_axes = [node_coords_and_axes]
 
@@ -975,77 +934,143 @@ def __ne__(self, other):
             result = not result
         return result
 
-    def __repr__(self):
-        def to_coord_and_axis(members):
-            def axis(member):
-                return member.split("_")[1]
-
-            result = [
-                f"({coord!s}, {axis(member)!r})"
-                for member, coord in members._asdict().items()
-                if coord is not None
-            ]
-            result = f"[{', '.join(result)}]" if result else None
-            return result
+    def summary(self, shorten=False):
+        """
+        Return a string representation of the Mesh.
 
-        node_coords_and_axes = to_coord_and_axis(self.node_coords)
-        connectivities = [
-            str(connectivity)
-            for connectivity in self.all_connectivities
-            if connectivity is not None
-        ]
+        Parameters
+        ----------
+        shorten : bool, default = False
+            If True, produce a one-line string form of the form <Mesh: ...>.
+            If False, produce a multi-line detailed print output.
+
+        Returns
+        -------
+        result : str
 
-        if len(connectivities) == 1:
-            connectivities = connectivities[0]
+        """
+        if shorten:
+            result = self._summary_oneline()
         else:
-            connectivities = f"[{', '.join(connectivities)}]"
+            result = self._summary_multiline()
+        return result
 
-        # positional arguments
-        args = [
-            f"topology_dimension={self.topology_dimension!r}",
-            f"node_coords_and_axes={node_coords_and_axes}",
-            f"connectivities={connectivities}",
-        ]
+    def __repr__(self):
+        return self.summary(shorten=True)
 
-        # optional argument
-        edge_coords_and_axes = to_coord_and_axis(self.edge_coords)
-        if edge_coords_and_axes:
-            args.append(f"edge_coords_and_axes={edge_coords_and_axes}")
-
-        # optional argument
-        if self.topology_dimension > 1:
-            face_coords_and_axes = to_coord_and_axis(self.face_coords)
-            if face_coords_and_axes:
-                args.append(f"face_coords_and_axes={face_coords_and_axes}")
-
-        def kwargs_filter(k, v):
-            result = False
-            if k != "topology_dimension":
-                if not (
-                    self.topology_dimension == 1 and k == "face_dimension"
-                ):
-                    if v is not None:
-                        result = True
-                        if (
-                            not isinstance(v, str)
-                            and isinstance(v, Iterable)
-                            and not v
-                        ):
-                            result = False
-                        elif k == "units" and v == "unknown":
-                            result = False
-            return result
-
-        # optional arguments (metadata)
-        args.extend(
-            [
-                f"{k}={v!r}"
-                for k, v in self.metadata._asdict().items()
-                if kwargs_filter(k, v)
-            ]
-        )
+    def __str__(self):
+        return self.summary(shorten=False)
+
+    def _summary_oneline(self):
+        # We use the repr output to produce short one-line identity summary,
+        # similar to the object.__str__ output "<object at xxx>".
+        # This form is also used in other str() constructions, like MeshCoord.
+        # By contrast, __str__ (below) produces a readable multi-line printout.
+        mesh_name = self.name()
+        if mesh_name in (None, "", "unknown"):
+            mesh_name = None
+        if mesh_name:
+            # Use a more human-readable form
+            mesh_string = f"<Mesh: '{mesh_name}'>"
+        else:
+            # Mimic the generic object.__str__ style.
+            mesh_id = id(self)
+            mesh_string = f"<Mesh object at {hex(mesh_id)}>"
 
-        return f"{self.__class__.__name__}({', '.join(args)})"
+        return mesh_string
+
+    def _summary_multiline(self):
+        # Produce a readable multi-line summary of the Mesh content.
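+        # (This long form backs __str__ and print(); __repr__ uses the
+        # one-line form from _summary_oneline above.)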
+ lines = [] + n_indent = 4 + indent_str = " " * n_indent + + def line(text, i_indent=0): + indent = indent_str * i_indent + lines.append(f"{indent}{text}") + + line(f"Mesh : '{self.name()}'") + line(f"topology_dimension: {self.topology_dimension}", 1) + for element in ("node", "edge", "face"): + if element == "node": + element_exists = True + else: + main_conn_name = f"{element}_node_connectivity" + main_conn = getattr(self, main_conn_name, None) + element_exists = main_conn is not None + if element_exists: + # Include a section for this element + line(element, 1) + # Print element dimension + dim_name = f"{element}_dimension" + dim = getattr(self, dim_name) + line(f"{dim_name}: '{dim}'", 2) + # Print defining connectivity (except node) + if element != "node": + main_conn_string = main_conn.summary( + shorten=True, linewidth=0 + ) + line(f"{main_conn_name}: {main_conn_string}", 2) + # Print coords + include_key = f"include_{element}s" + coords = self.coords(**{include_key: True}) + if coords: + line(f"{element} coordinates", 2) + for coord in coords: + coord_string = coord.summary(shorten=True, linewidth=0) + line(coord_string, 3) + + # Having dealt with essential info, now add any optional connectivities + # N.B. includes boundaries: as optional connectivity, not an "element" + optional_conn_names = ( + "boundary_connectivity", + "face_face_connectivity", + "face_edge_connectivity", + "edge_face_connectivity", + ) + optional_conns = [ + getattr(self, name, None) for name in optional_conn_names + ] + optional_conns = { + name: conn + for conn, name in zip(optional_conns, optional_conn_names) + if conn is not None + } + if optional_conns: + line("optional connectivities", 1) + for name, conn in optional_conns.items(): + conn_string = conn.summary(shorten=True, linewidth=0) + line(f"{name}: {conn_string}", 2) + + # Output the detail properties, basically those from CFVariableMixin + for name in BaseMetadata._members: + val = getattr(self, name, None) + if val is not None: + if name == "units": + show = val.origin != Unit(None) + elif isinstance(val, Container): + show = bool(val) + else: + show = val is not None + if show: + if name == "attributes": + # Use a multi-line form for this. + line("attributes:", 1) + max_attname_len = max(len(attr) for attr in val.keys()) + for attrname, attrval in val.items(): + attrname = attrname.ljust(max_attname_len) + if isinstance(attrval, str): + # quote strings + attrval = repr(attrval) + # and abbreviate really long ones + attrval = clip_string(attrval) + attr_string = f"{attrname} {attrval}" + line(attr_string, 2) + else: + line(f"{name}: {val!r}", 1) + + result = "\n".join(lines) + return result def __setstate__(self, state): metadata_manager, coord_manager, connectivity_manager = state @@ -1375,17 +1400,17 @@ def connectivities( :class:`~iris.experimental.ugrid.mesh.Connectivity`. * contains_node (bool): - Contains the ``node`` location as part of the + Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_edge (bool): - Contains the ``edge`` location as part of the + Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. 
* contains_face (bool): - Contains the ``face`` location as part of the + Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. @@ -1476,17 +1501,17 @@ def connectivity( :class:`~iris.experimental.ugrid.mesh.Connectivity`. * contains_node (bool): - Contains the ``node`` location as part of the + Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_edge (bool): - Contains the ``edge`` location as part of the + Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_face (bool): - Contains the ``face`` location as part of the + Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. @@ -1743,17 +1768,17 @@ def remove_connectivities( :class:`~iris.experimental.ugrid.mesh.Connectivity`. * contains_node (bool): - Contains the ``node`` location as part of the + Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. * contains_edge (bool): - Contains the ``edge`` location as part of the + Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. * contains_face (bool): - Contains the ``face`` location as part of the + Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. @@ -2089,12 +2114,12 @@ def _remove(self, **kwargs): return result - def _setter(self, location, axis, coord, shape): + def _setter(self, element, axis, coord, shape): axis = axis.lower() - member = f"{location}_{axis}" + member = f"{element}_{axis}" # enforce the UGRID minimum coordinate requirement - if location == "node" and coord is None: + if element == "node" and coord is None: emsg = ( f"{member!r} is a required coordinate, cannot set to 'None'." 
) @@ -2121,22 +2146,22 @@ def _setter(self, location, axis, coord, shape): self._members[member] = coord - def _shape(self, location): - coord = getattr(self, f"{location}_x") + def _shape(self, element): + coord = getattr(self, f"{element}_x") shape = coord.shape if coord is not None else None if shape is None: - coord = getattr(self, f"{location}_y") + coord = getattr(self, f"{element}_y") if coord is not None: shape = coord.shape return shape @property def _edge_shape(self): - return self._shape(location="edge") + return self._shape(element="edge") @property def _node_shape(self): - return self._shape(location="node") + return self._shape(element="node") @property def all_members(self): @@ -2153,7 +2178,7 @@ def edge_x(self): @edge_x.setter def edge_x(self, coord): self._setter( - location="edge", axis="x", coord=coord, shape=self._edge_shape + element="edge", axis="x", coord=coord, shape=self._edge_shape ) @property @@ -2163,7 +2188,7 @@ def edge_y(self): @edge_y.setter def edge_y(self, coord): self._setter( - location="edge", axis="y", coord=coord, shape=self._edge_shape + element="edge", axis="y", coord=coord, shape=self._edge_shape ) @property @@ -2177,7 +2202,7 @@ def node_x(self): @node_x.setter def node_x(self, coord): self._setter( - location="node", axis="x", coord=coord, shape=self._node_shape + element="node", axis="x", coord=coord, shape=self._node_shape ) @property @@ -2187,7 +2212,7 @@ def node_y(self): @node_y.setter def node_y(self, coord): self._setter( - location="node", axis="y", coord=coord, shape=self._node_shape + element="node", axis="y", coord=coord, shape=self._node_shape ) def _add(self, coords): @@ -2363,7 +2388,7 @@ def __init__( @property def _face_shape(self): - return self._shape(location="face") + return self._shape(element="face") @property def all_members(self): @@ -2380,7 +2405,7 @@ def face_x(self): @face_x.setter def face_x(self, coord): self._setter( - location="face", axis="x", coord=coord, shape=self._face_shape + element="face", axis="x", coord=coord, shape=self._face_shape ) @property @@ -2390,7 +2415,7 @@ def face_y(self): @face_y.setter def face_y(self, coord): self._setter( - location="face", axis="y", coord=coord, shape=self._face_shape + element="face", axis="y", coord=coord, shape=self._face_shape ) def add( @@ -2510,24 +2535,20 @@ def add(self, *connectivities): # Validate shapes. proposed_members = {**self._members, **add_dict} - locations = set( - [ - c.src_location - for c in proposed_members.values() - if c is not None - ] + elements = set( + [c.location for c in proposed_members.values() if c is not None] ) - for location in locations: + for element in elements: counts = [ - len(c.indices_by_src(c.lazy_indices())) + len(c.indices_by_location(c.lazy_indices())) for c in proposed_members.values() - if c is not None and c.src_location == location + if c is not None and c.location == element ] # Check is list values are identical. if not counts.count(counts[0]) == len(counts): message = ( f"Invalid Connectivities provided - inconsistent " - f"{location} counts." + f"{element} counts." 
                )
                raise ValueError(message)
 
@@ -2582,13 +2603,16 @@ def filters(
             instance for instance in members if instance.cf_role == cf_role
         ]
 
-        def location_filter(instances, loc_arg, loc_name):
+        def element_filter(instances, loc_arg, loc_name):
             if loc_arg is False:
                 filtered = [
                     instance
                     for instance in instances
                     if loc_name
-                    not in (instance.src_location, instance.tgt_location)
+                    not in (
+                        instance.location,
+                        instance.connected,
+                    )
                 ]
             elif loc_arg is None:
                 filtered = instances
@@ -2597,8 +2621,7 @@ def location_filter(instances, loc_arg, loc_name):
                 filtered = [
                     instance
                     for instance in instances
-                    if loc_name
-                    in (instance.src_location, instance.tgt_location)
+                    if loc_name in (instance.location, instance.connected)
                 ]
             return filtered
 
@@ -2608,7 +2631,7 @@ def location_filter(instances, loc_arg, loc_name):
             (contains_edge, "edge"),
             (contains_face, "face"),
         ):
-            members = location_filter(members, arg, loc)
+            members = element_filter(members, arg, loc)
 
         # No need to actually modify filtering behaviour - already won't return
         # any face cf-roles if none are present.
@@ -2790,10 +2813,10 @@ def __init__(
         # NOTE: currently *not* included in metadata. In future it might be.
         self._mesh = mesh
 
-        if location not in Mesh.LOCATIONS:
+        if location not in Mesh.ELEMENTS:
             msg = (
                 f"'location' of {location} is not a valid Mesh location', "
-                f"must be one of {Mesh.LOCATIONS}."
+                f"must be one of {Mesh.ELEMENTS}."
             )
             raise ValueError(msg)
         # Held in metadata, readable as self.location, but cannot set it.
@@ -2943,62 +2966,62 @@ def __eq__(self, other):
 
     def __hash__(self):
         return hash(id(self))
 
-    def _string_summary(self, repr_style):
-        # Note: bypass the immediate parent here, which is Coord, because we
-        # have no interest in reporting coord_system or climatological, or in
-        # printing out our points/bounds.
-        # We also want to list our defining properties, i.e. mesh/location/axis
-        # *first*, before names/units etc, so different from other Coord types.
-
-        # First construct a shortform text summary to identify the Mesh.
-        # IN 'str-mode', this attempts to use Mesh.name() if it is set,
-        # otherwise uses an object-id style (as also for 'repr-mode').
-        # TODO: use a suitable method provided by Mesh, e.g. something like
-        # "Mesh.summary(shorten=True)", when it is available.
-        mesh_name = None
-        if not repr_style:
-            mesh_name = self.mesh.name()
-            if mesh_name in (None, "", "unknown"):
-                mesh_name = None
-        if mesh_name:
-            # Use a more human-readable form
-            mesh_string = f"Mesh({mesh_name!r})"
+    def summary(self, *args, **kwargs):
+        # We need to specialise _DimensionalMetadata.summary, so that we always
+        # print the mesh+location of a MeshCoord.
+        if len(args) > 0:
+            shorten = args[0]
         else:
-            # Mimic the generic object.__str__ style.
-            mesh_id = id(self.mesh)
-            mesh_string = f"<Mesh object at {hex(mesh_id)}>"
-        result = (
-            f"mesh={mesh_string}"
-            f", location={self.location!r}"
-            f", axis={self.axis!r}"
-        )
-        # Add 'other' metadata that is drawn from the underlying node-coord.
-        # But put these *afterward*, unlike other similar classes.
-        for item in (
-            "shape",
-            "standard_name",
-            "units",
-            "long_name",
-            "attributes",
-        ):
-            # NOTE: order of these matches Coord.summary, but omit var_name.
-            val = getattr(self, item, None)
-            if item == "attributes":
-                is_blank = len(val) == 0  # an empty dict is as good as none
-            else:
-                is_blank = val is None
-            if not is_blank:
-                result += f", {item}={val!r}"
-
-        result = f"MeshCoord({result})"
+            shorten = kwargs.get("shorten", False)
+
+        # Get the default-form result.
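+        # (First tweak the kwargs, call the generic summary, then rework
+        # its output so that mesh and location always appear.)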
+ if shorten: + # NOTE: we simply aren't interested in the values for the repr, + # so fix linewidth to suppress them + kwargs["linewidth"] = 1 + + # Plug private key, to get back the section structure info + section_indices = {} + kwargs["_section_indices"] = section_indices + result = super().summary(*args, **kwargs) + + # Modify the generic 'default-form' result to produce what we want. + if shorten: + # Single-line form : insert mesh+location before the array part + # Construct a text detailing the mesh + location + mesh_string = self.mesh.name() + if mesh_string == "unknown": + # If no name, replace with the one-line summary + mesh_string = self.mesh.summary(shorten=True) + extra_str = f"mesh({mesh_string}) location({self.location}) " + # find where in the line the data-array text begins + i_line, i_array = section_indices["data"] + assert i_line == 0 + # insert the extra text there + result = result[:i_array] + extra_str + result[i_array:] + # NOTE: this invalidates the original width calculation and may + # easily extend the result beyond the intended maximum linewidth. + # We do treat that as an advisory control over array printing, not + # an absolute contract, so just ignore the problem for now. + else: + # Multiline form + # find where the "location: ... " section is + i_location, i_namestart = section_indices["location"] + lines = result.split("\n") + location_line = lines[i_location] + # copy the indent spacing + indent = location_line[:i_namestart] + # use that to construct a suitable 'mesh' line + mesh_string = self.mesh.summary(shorten=True) + mesh_line = f"{indent}mesh: {mesh_string}" + # Move the 'location' line, putting it and the 'mesh' line right at + # the top, immediately after the header line. + del lines[i_location] + lines[1:1] = [mesh_line, location_line] + # Re-join lines to give the result + result = "\n".join(lines) return result - def __str__(self): - return self._string_summary(repr_style=False) - - def __repr__(self): - return self._string_summary(repr_style=True) - def _construct_access_arrays(self): """ Build lazy points and bounds arrays, providing dynamic access via the @@ -3035,7 +3058,7 @@ def _construct_access_arrays(self): # Data can be real or lazy, so operations must work in Dask, too. indices = bounds_connectivity.core_indices() # Normalise indices dimension order to [faces/edges, bounds] - indices = bounds_connectivity.indices_by_src(indices) + indices = bounds_connectivity.indices_by_location(indices) # Normalise the start index indices = indices - bounds_connectivity.start_index diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index 94128cdf508..ae0b7879084 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -28,9 +28,9 @@ class ConnectivityMetadata(BaseMetadata): """ - # The "src_dim" member is stateful only, and does not participate in + # The "location_axis" member is stateful only, and does not participate in # lenient/strict equivalence. - _members = ("cf_role", "start_index", "src_dim") + _members = ("cf_role", "start_index", "location_axis") __slots__ = () @@ -53,7 +53,7 @@ def _combine_lenient(self, other): A list of combined metadata member values. """ - # Perform "strict" combination for "cf_role", "start_index", "src_dim". + # Perform "strict" combination for "cf_role", "start_index", "location_axis". 
def func(field): left = getattr(self, field) right = getattr(other, field) @@ -82,9 +82,10 @@ def _compare_lenient(self, other): """ # Perform "strict" comparison for "cf_role", "start_index". - # The "src_dim" member is not part of lenient equivalence. + # The "location_axis" member is not part of lenient equivalence. members = filter( - lambda member: member != "src_dim", ConnectivityMetadata._members + lambda member: member != "location_axis", + ConnectivityMetadata._members, ) result = all( [ @@ -112,7 +113,7 @@ def _difference_lenient(self, other): A list of difference metadata member values. """ - # Perform "strict" difference for "cf_role", "start_index", "src_dim". + # Perform "strict" difference for "cf_role", "start_index", "location_axis". def func(field): left = getattr(self, field) right = getattr(other, field) diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index f2b0cfc0955..96a848deb04 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -17,7 +17,7 @@ UriProtocol, ) -from . import abf, name, netcdf, nimrod, pp, um +from . import name, netcdf, nimrod, pp, um __all__ = ["FORMAT_AGENT"] @@ -224,16 +224,23 @@ def _load_grib(*args, **kwargs): # # ABF/ABL +# TODO: now deprecated, remove later # +def load_cubes_abf_abl(*args, **kwargs): + from . import abf + + return abf.load_cubes(*args, **kwargs) + + FORMAT_AGENT.add_spec( FormatSpecification( - "ABF", FileExtension(), ".abf", abf.load_cubes, priority=3 + "ABF", FileExtension(), ".abf", load_cubes_abf_abl, priority=3 ) ) FORMAT_AGENT.add_spec( FormatSpecification( - "ABL", FileExtension(), ".abl", abf.load_cubes, priority=3 + "ABL", FileExtension(), ".abl", load_cubes_abf_abl, priority=3 ) ) diff --git a/lib/iris/fileformats/_ff_cross_references.py b/lib/iris/fileformats/_ff_cross_references.py index 0c7af263242..ca41f5257fa 100644 --- a/lib/iris/fileformats/_ff_cross_references.py +++ b/lib/iris/fileformats/_ff_cross_references.py @@ -3,11 +3,9 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -# -# DO NOT EDIT: AUTO-GENERATED """ -Auto-generated from iris/tools/gen_stash_refs.py -Relates grid code and field code to the stash code. +Table providing UM grid-code, field-code and pseudolevel-type for (some) +stash codes. Used in UM file i/o. """ diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d286abbf3d6..4c5184deb13 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -18,7 +18,7 @@ 3) Iris-specific info is (still) stored in additional properties created on the engine object : - engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + engine.cf_var, .cube, .cube_parts, .requires, .rules_triggered, .filename Our "rules" are just action routines. The top-level 'run_actions' routine decides which actions to call, based on the @@ -78,7 +78,7 @@ def inner(engine, *args, **kwargs): # but also may vary depending on whether it successfully # triggered, and if so what it matched. 
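+        # (The names accumulate on engine.rules_triggered, which
+        # iris.fileformats.netcdf._actions_activation_stats prints out
+        # when debugging a load.)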
rule_name = _default_rulenamesfunc(func.__name__) - engine.rule_triggered.add(rule_name) + engine.rules_triggered.add(rule_name) func._rulenames_func = _default_rulenamesfunc return inner diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index a5b507d5830..198daeceeaa 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -440,10 +440,13 @@ def build_mercator_coordinate_system(engine, cf_grid_var): longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) # Iris currently only supports Mercator projections with specific - # values for false_easting, false_northing, - # scale_factor_at_projection_origin and standard_parallel. These are - # checked elsewhere. + # scale_factor_at_projection_origin. This is checked elsewhere. ellipsoid = None if ( @@ -454,7 +457,11 @@ def build_mercator_coordinate_system(engine, cf_grid_var): ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) cs = iris.coord_systems.Mercator( - longitude_of_projection_origin, ellipsoid=ellipsoid + longitude_of_projection_origin, + ellipsoid=ellipsoid, + standard_parallel=standard_parallel, + false_easting=false_easting, + false_northing=false_northing, ) return cs @@ -1244,27 +1251,10 @@ def has_supported_mercator_parameters(engine, cf_name): is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] - false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) scale_factor_at_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) - if false_easting is not None and false_easting != 0: - warnings.warn( - "False eastings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False - if false_northing is not None and false_northing != 0: - warnings.warn( - "False northings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False if ( scale_factor_at_projection_origin is not None and scale_factor_at_projection_origin != 1 @@ -1274,12 +1264,6 @@ def has_supported_mercator_parameters(engine, cf_name): "Mercator projections" ) is_valid = False - if standard_parallel is not None and standard_parallel != 0: - warnings.warn( - "Standard parallels other than 0.0 not yet " - "supported for Mercator projections" - ) - is_valid = False return is_valid diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 678d9b04cf5..5c70c5acf27 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -23,11 +23,20 @@ import numpy.ma as ma import iris +from iris._deprecation import warn_deprecated from iris.coord_systems import GeogCS from iris.coords import AuxCoord, DimCoord import iris.fileformats import iris.io.format_picker +wmsg = ( + "iris.fileformats.abf has been deprecated and will be removed in a " + "future release. If you make use of this functionality, please contact " + "the Iris Developers to discuss how to retain it (which may involve " + "reversing the deprecation)." 
+) +warn_deprecated(wmsg) + X_SIZE = 4320 Y_SIZE = 2160 diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index cc857c7f6ba..2fb628bebfd 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -12,8 +12,17 @@ import subprocess import iris +from iris._deprecation import warn_deprecated import iris.util +wmsg = ( + "iris.fileformats.dot has been deprecated and will be removed in a " + "future release. If you make use of this functionality, please contact " + "the Iris Developers to discuss how to retain it (which may involve " + "reversing the deprecation)." +) +warn_deprecated(wmsg) + _GRAPH_INDENT = " " * 4 _SUBGRAPH_INDENT = " " * 8 diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 77134259ad9..80f213dbc2f 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -498,7 +498,7 @@ def _actions_activation_stats(engine, cf_name): print("Rules Triggered:") - for rule in sorted(list(engine.rule_triggered)): + for rule in sorted(list(engine.rules_triggered)): print("\t%s" % rule) print("Case Specific Facts:") @@ -570,13 +570,21 @@ def _get_cf_var_data(cf_var, filename): return as_lazy_data(proxy, chunks=chunks) -class OrderedAddableList(list): - # Used purely in actions debugging, to accumulate a record of which actions - # were activated. - # It replaces a set, so as to record the ordering of operations, with - # possible repeats, and it also numbers the entries. - # Actions routines invoke the 'add' method, which thus effectively converts - # a set.add into a list.append. +class _OrderedAddableList(list): + """ + A custom container object for actions recording. + + Used purely in actions debugging, to accumulate a record of which actions + were activated. + + It replaces a set, so as to preserve the ordering of operations, with + possible repeats, and it also numbers the entries. + + The actions routines invoke an 'add' method, so this effectively replaces + a set.add with a list.append. + + """ + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._n_add = 0 @@ -602,7 +610,7 @@ def _load_cube(engine, cf, cf_var, filename): engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = OrderedAddableList() + engine.rules_triggered = _OrderedAddableList() engine.filename = filename # Assert all the case-specific facts. @@ -825,12 +833,12 @@ def inner(cf_datavar): def load_cubes(filenames, callback=None, constraints=None): """ - Loads cubes from a list of NetCDF filenames/URLs. + Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. Args: * filenames (string/list): - One or more NetCDF filenames/DAP URLs to load from. + One or more NetCDF filenames/OPeNDAP URLs to load from. Kwargs: @@ -972,9 +980,9 @@ def __setitem__(self, keys, arr): self.target[keys] = arr -# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.LOCATIONS`, +# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, # but in the preferred order for coord/connectivity variables in the file. -MESH_LOCATIONS = ("node", "edge", "face") +MESH_ELEMENTS = ("node", "edge", "face") class Saver: @@ -1422,7 +1430,7 @@ def _add_mesh(self, cube_or_mesh): mesh_dims = self._mesh_dims[mesh] # Add all the element coordinate variables. 
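+        # (These are visited in the preferred file order: node, edge, face.)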
- for location in MESH_LOCATIONS: + for location in MESH_ELEMENTS: coords_meshobj_attr = f"{location}_coords" coords_file_attr = f"{location}_coordinates" mesh_coords = getattr(mesh, coords_meshobj_attr, None) @@ -1460,7 +1468,7 @@ def _add_mesh(self, cube_or_mesh): last_dim = f"{cf_mesh_name}_{loc_from}_N_{loc_to}s" # Create if it does not already exist. if last_dim not in self._dataset.dimensions: - length = conn.shape[1 - conn.src_dim] + length = conn.shape[1 - conn.location_axis] self._dataset.createDimension(last_dim, length) # Create variable. @@ -1470,7 +1478,7 @@ def _add_mesh(self, cube_or_mesh): # when it is first created. loc_dim_name = mesh_dims[loc_from] conn_dims = (loc_dim_name, last_dim) - if conn.src_dim == 1: + if conn.location_axis == 1: # Has the 'other' dimension order, =reversed conn_dims = conn_dims[::-1] if iris.util.is_masked(conn.core_indices()): @@ -1494,7 +1502,7 @@ def _add_mesh(self, cube_or_mesh): _setncattr(cf_mesh_var, cf_conn_attr_name, cf_conn_name) # If the connectivity had the 'alternate' dimension order, add the # relevant dimension property - if conn.src_dim == 1: + if conn.location_axis == 1: loc_dim_attr = f"{loc_from}_dimension" # Should only get here once. assert loc_dim_attr not in cf_mesh_var.ncattrs() @@ -1813,7 +1821,7 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): # NOTE: one of these will be a cube dimension, but that one does not # get any special handling. We *do* want to list/create them in a # definite order (node,edge,face), and before non-mesh dimensions. - for location in MESH_LOCATIONS: + for location in MESH_ELEMENTS: # Find if this location exists in the mesh, and a characteristic # coordinate to identify it with. # To use only _required_ UGRID components, we use a location @@ -1850,7 +1858,9 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): (dim_length,) = dim_element.shape else: # extract source dim, respecting dim-ordering - dim_length = dim_element.shape[dim_element.src_dim] + dim_length = dim_element.shape[ + dim_element.location_axis + ] # Name it for the relevant mesh dimension location_dim_attr = f"{location}_dimension" dim_name = getattr(mesh, location_dim_attr) @@ -2551,10 +2561,8 @@ def add_ellipsoid(ellipsoid): cf_var_grid.longitude_of_projection_origin = ( cs.longitude_of_projection_origin ) - # The Mercator class has implicit defaults for certain - # parameters - cf_var_grid.false_easting = 0.0 - cf_var_grid.false_northing = 0.0 + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing cf_var_grid.scale_factor_at_projection_origin = 1.0 # lcc @@ -2736,9 +2744,9 @@ def _create_cf_data_variable( cmin, cmax = _co_realise_lazy_arrays([cmin, cmax]) n = dtype.itemsize * 8 if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) + scale_factor = (cmax - cmin) / (2**n - 2) else: - scale_factor = (cmax - cmin) / (2 ** n - 1) + scale_factor = (cmax - cmin) / (2**n - 1) if dtype.kind == "u": add_offset = cmin elif dtype.kind == "i": diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 9f213ec4db9..9bda98bf61e 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -403,7 +403,7 @@ def _calculate_str_value_from_value(self): def _calculate_value_from_str_value(self): self._value = np.sum( - [10 ** i * val for i, val in enumerate(self._strvalue)] + [10**i * val for i, val in enumerate(self._strvalue)] ) def __len__(self): @@ -418,7 +418,7 @@ def __getitem__(self, key): # if the key returns a list of 
values, then combine them together # to an integer if isinstance(val, list): - val = sum([10 ** i * val for i, val in enumerate(val)]) + val = sum([10**i * val for i, val in enumerate(val)]) return val diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index c2a0a5d09e2..8aee67ae3ec 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -3,14 +3,6 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -# -# DO NOT EDIT: AUTO-GENERATED -# Created on 29 November 2019 14:11 from -# http://www.metarelate.net/metOcean -# at commit 448f2ef5e676edaaa27408b9f3ddbecbf05e3289 -# -# https://github.com/metarelate/metOcean/commit/448f2ef5e676edaaa27408b9f3ddbecbf05e3289 - """ Provides UM/CF phenomenon translations. @@ -689,7 +681,7 @@ 'm01s09i212': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_4p5_oktas', 'kft'), 'm01s09i213': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_5p5_oktas', 'kft'), 'm01s09i214': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_6p5_oktas', 'kft'), - 'm01s09i215': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_7p5_oktas', 'kft'), + 'm01s09i215': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_7p9_oktas', 'kft'), 'm01s09i216': CFName(None, 'cloud_area_fraction_assuming_random_overlap', '1'), 'm01s09i217': CFName(None, 'cloud_area_fraction_assuming_maximum_random_overlap', '1'), 'm01s09i218': CFName(None, 'cloud_area_fraction_assuming_only_consider_surface_to_1000_feet_asl', '1'), diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 64501afd1eb..8d5a2e05d20 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -12,6 +12,7 @@ from collections import OrderedDict import glob import os.path +import pathlib import re import iris.exceptions @@ -85,8 +86,9 @@ def decode_uri(uri, default="file"): r""" Decodes a single URI into scheme and scheme-specific parts. - In addition to well-formed URIs, it also supports bare file paths. - Both Windows and UNIX style paths are accepted. + In addition to well-formed URIs, it also supports bare file paths as strings + or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are + accepted. .. testsetup:: @@ -113,6 +115,8 @@ def decode_uri(uri, default="file"): ('file', 'dataZoo/...') """ + if isinstance(uri, pathlib.PurePath): + uri = str(uri) # make sure scheme has at least 2 letters to avoid windows drives # put - last in the brackets so it refers to the character, not a range # reference on valid schemes: http://tools.ietf.org/html/std66#section-3.1 @@ -212,7 +216,7 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): """ - Takes a list of urls and a callback function, and returns a generator + Takes a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. .. 
note:: @@ -222,11 +226,11 @@ def load_http(urls, callback): """ # Create default dict mapping iris format handler to its associated filenames + from iris.fileformats import FORMAT_AGENT + handler_map = collections.defaultdict(list) for url in urls: - handling_format_spec = iris.fileformats.FORMAT_AGENT.get_spec( - url, None - ) + handling_format_spec = FORMAT_AGENT.get_spec(url, None) handler_map[handling_format_spec].append(url) # Call each iris format handler with the appropriate filenames @@ -312,7 +316,8 @@ def find_saver(filespec): Args: - * filespec - A string such as "my_file.pp" or "PP". + * filespec + A string such as "my_file.pp" or "PP". Returns: A save function or None. @@ -359,7 +364,8 @@ def save(source, target, saver=None, **kwargs): * target: A filename (or writeable, depending on file format). When given a filename or file, Iris can determine the - file format. + file format. Filename can be given as a string or + :class:`pathlib.PurePath`. Kwargs: @@ -414,6 +420,8 @@ def save(source, target, saver=None, **kwargs): from iris.cube import Cube, CubeList # Determine format from filename + if isinstance(target, pathlib.PurePath): + target = str(target) if isinstance(target, str) and saver is None: saver = find_saver(target) elif hasattr(target, "name") and saver is None: diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 0e9645c7835..aefca889cf5 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -591,7 +591,7 @@ def _fixup_dates(coord, values): r = [datetime.datetime(*date) for date in dates] else: try: - import nc_time_axis + import nc_time_axis # noqa: F401 except ImportError: msg = ( "Cannot plot against time in a non-gregorian " @@ -603,12 +603,10 @@ def _fixup_dates(coord, values): raise IrisError(msg) r = [ - nc_time_axis.CalendarDateTime( - cftime.datetime(*date, calendar=coord.units.calendar), - coord.units.calendar, - ) + cftime.datetime(*date, calendar=coord.units.calendar) for date in dates ] + values = np.empty(len(r), dtype=object) values[:] = r return values @@ -675,7 +673,7 @@ def _get_plot_objects(args): if ( isinstance(v_object, iris.cube.Cube) and isinstance(u_object, iris.coords.Coord) - and iris.util.guess_coord_axis(u_object) in ["Y", "Z"] + and iris.util.guess_coord_axis(u_object) == "Z" ): u_object, v_object = v_object, u_object u, v = v, u diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index 340f37dda71..9f0baeda35f 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -18,10 +18,6 @@ from iris.coords import AuxCoord -if tests.NC_TIME_AXIS_AVAILABLE: - from nc_time_axis import CalendarDateTime - - # Run tests in no graphics mode if matplotlib is not available. 
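The iris.plot change above means times in non-gregorian calendars now plot as plain ``cftime.datetime`` objects, relying on ``nc_time_axis`` to convert them. A minimal sketch (assuming a recent ``nc_time_axis``, which registers a matplotlib converter for ``cftime.datetime`` on import):

.. code::

    import cftime
    import matplotlib.pyplot as plt
    import nc_time_axis  # noqa: F401  (registers the converter)

    times = [
        cftime.datetime(2000, 1, day, calendar="360_day") for day in (1, 2, 3)
    ]
    plt.plot(times, [1.0, 2.0, 3.0])
    plt.show()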
 if tests.MPL_AVAILABLE:
     import iris.plot as iplt
 
@@ -48,9 +44,8 @@ def test_360_day_calendar(self):
             )
             for atime in times
         ]
-        expected_ydata = np.array(
-            [CalendarDateTime(time, calendar) for time in times]
-        )
+
+        expected_ydata = times
         (line1,) = iplt.plot(time_coord)
         result_ydata = line1.get_ydata()
         self.assertArrayEqual(expected_ydata, result_ydata)
diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py
index f7aaa1d05c0..2a45561e177 100644
--- a/lib/iris/tests/integration/test_netcdf.py
+++ b/lib/iris/tests/integration/test_netcdf.py
@@ -416,7 +416,7 @@ def setUp(self):
         levels.units = "centimeters"
         levels.positive = "down"
         levels.axis = "Z"
-        levels[:] = np.linspace(0, 10 ** 5, 3)
+        levels[:] = np.linspace(0, 10**5, 3)
 
         volcello.id = "volcello"
         volcello.out_name = "volcello"
@@ -507,9 +507,9 @@ def _get_scale_factor_add_offset(cube, datatype):
     else:
         masked = False
     if masked:
-        scale_factor = (cmax - cmin) / (2 ** n - 2)
+        scale_factor = (cmax - cmin) / (2**n - 2)
     else:
-        scale_factor = (cmax - cmin) / (2 ** n - 1)
+        scale_factor = (cmax - cmin) / (2**n - 1)
     if dt.kind == "u":
         add_offset = cmin
     elif dt.kind == "i":
diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py
index a7d6c89994a..a8e3acaa410 100644
--- a/lib/iris/tests/integration/test_trajectory.py
+++ b/lib/iris/tests/integration/test_trajectory.py
@@ -216,8 +216,10 @@ def setUp(self):
         cube.coord("depth").bounds = b32
         self.cube = cube
         # define a latitude trajectory (put coords in a different order
-        # to the cube, just to be awkward)
-        latitudes = list(range(-90, 90, 2))
+        # to the cube, just to be awkward), but avoid the south-pole
+        # singularity as a sample point and the issue of snapping to
+        # multiple equidistant closest points within the orca antarctic hole
+        latitudes = list(range(-80, 90, 2))
         longitudes = [-90] * len(latitudes)
         self.sample_points = [
             ("longitude", longitudes),
@@ -226,7 +228,9 @@ def test_tri_polar(self):
         # extract
-        sampled_cube = traj_interpolate(self.cube, self.sample_points)
+        sampled_cube = traj_interpolate(
+            self.cube, self.sample_points, method="nearest"
+        )
         self.assertCML(
             sampled_cube, ("trajectory", "tri_polar_latitude_slice.cml")
         )
@@ -329,8 +333,12 @@ def test_hybrid_height(self):
         # Put a lazy array into the cube so we can test deferred loading.
         cube.data = as_lazy_data(cube.data)
 
+        # Use opinionated grid-latitudes to avoid the issue of
+        # platform-specific behaviour within SciPy cKDTree choosing a
+        # different equidistant nearest-neighbour point when there are
+        # multiple valid candidates.
traj = ( - ("grid_latitude", [20.5, 21.5, 22.5, 23.5]), + ("grid_latitude", [20.4, 21.6, 22.6, 23.6]), ("grid_longitude", [31, 32, 33, 34]), ) xsec = traj_interpolate(cube, traj, method="nearest") diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt index a6738e654f3..a4c1157df26 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt @@ -10,4 +10,4 @@ air_potential_temperature / (K) (scalar cube) surface_altitude 413.93686 m time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt index 95f7e7b57e0..7d43a997dac 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (grid_longitude: 100) sigma 0.9994238, bound=(1.0, 0.99846387) time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt index c4184d199ac..9adeb35c738 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (grid_latitude: 100; grid_longitude: 100) sigma 0.9994238, bound=(1.0, 0.99846387) time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt index af81d4e9914..dc5e71433f8 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (model_level_number: 70; grid_latitude: 100; forecast_period 0.0 hours time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt index afcdedf100b..52adc03efb6 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt b/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt index 6a3276d8618..02e9849d38b 100644 --- a/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt +++ 
b/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt @@ -8,4 +8,4 @@ air_temperature / (K) (scalar cube) time 1998-12-01 00:00:00 Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt b/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt index 6a3276d8618..02e9849d38b 100644 --- a/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt +++ b/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt @@ -8,4 +8,4 @@ air_temperature / (K) (scalar cube) time 1998-12-01 00:00:00 Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt b/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt index ba93542e518..ffb6a62daf6 100644 --- a/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt +++ b/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt @@ -14,4 +14,4 @@ air_temperature / (K) (latitude: 73; longitude: 96) percentile longitude (6 minutes, This is another test comment) Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt b/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt index 1b86bd6597d..0ac5bd3c8af 100644 --- a/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt +++ b/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt @@ -11,4 +11,4 @@ air_potential_temperature / (K) (-- : 6; -- : 70; grid_latitude: 100; grid_l Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt b/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt index fc274ed4c1d..394e52e5c90 100644 --- a/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt +++ b/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt @@ -15,4 +15,4 @@ air_temperature / (K) (latitude: 73; longitude: 96) time 1998-12-01 00:00:00 Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt b/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt index 29c181345c4..594ad116888 100644 --- a/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt +++ b/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt @@ -2,4 +2,4 @@ thingness / (1) (foo: 11) Dimension coordinates: foo x Attributes: - source ꀀabcd޴ \ No newline at end of file + source 'ꀀabcd\u07b4' \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt index c1d62f28e45..3e7aeda3092 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt @@ -1,11 +1 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, 
-0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt index c1d62f28e45..3030ea962a9 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt @@ -1,11 +1,15 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file +DimCoord : level_height / (m) + points: [ + 5. , 21.666664, 45. , 75. , 111.66668 , + 155. , 205. , 261.6667 , 325. , 395. ] + bounds: [ + [ 0. , 13.333332], + [ 13.333332, 33.333332], + ..., + [293.3333 , 360. ], + [360. , 433.3332 ]] + shape: (10,) bounds(10, 2) + dtype: float32 + long_name: 'level_height' + attributes: + positive 'up' \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt b/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt index 120546311fd..57d5882e88e 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt @@ -1,2 +1 @@ -DimCoord(array([347921.16666667, 347921.33333333, 347921.5 , 347921.66666666, - 347921.83333333, 347922. ]), standard_name='time', units=Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt b/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt index 9d209402e6f..f9cd09223a0 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt @@ -1,2 +1,5 @@ -DimCoord([2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, - 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00], standard_name='time', calendar='gregorian') \ No newline at end of file +DimCoord : forecast_period / (hours) + points: [0.] 
+ shape: (1,) + dtype: float64 + standard_name: 'forecast_period' \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt index c1d62f28e45..cbf44b1dbbd 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt @@ -1,11 +1 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt index c1d62f28e45..95c0a601596 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt @@ -1,11 +1,14 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file +DimCoord : grid_latitude / (degrees) + points: [ + -0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, + -0.1215, -0.1206, -0.1197] + bounds: [ + [-0.12825, -0.12735], + [-0.12735, -0.12645], + ..., + [-0.12105, -0.12015], + [-0.12015, -0.11925]] + shape: (10,) bounds(10, 2) + dtype: float32 + standard_name: 'grid_latitude' + coord_system: RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0)) \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt b/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt index 120546311fd..ae1b86ae023 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt @@ -1,2 +1 @@ -DimCoord(array([347921.16666667, 347921.33333333, 347921.5 , 347921.66666666, - 347921.83333333, 347922. 
]), standard_name='time', units=Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt index 9d209402e6f..6b95b572156 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt @@ -1,2 +1,7 @@ -DimCoord([2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, - 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00], standard_name='time', calendar='gregorian') \ No newline at end of file +DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [ + 2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, + 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00] + shape: (6,) + dtype: float64 + standard_name: 'time' \ No newline at end of file diff --git a/lib/iris/tests/results/coord_systems/Mercator.xml b/lib/iris/tests/results/coord_systems/Mercator.xml index e8036ef8244..db3ccffec78 100644 --- a/lib/iris/tests/results/coord_systems/Mercator.xml +++ b/lib/iris/tests/results/coord_systems/Mercator.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/derived/no_orog.__str__.txt b/lib/iris/tests/results/derived/no_orog.__str__.txt index e277b5d2765..de139592a6e 100644 --- a/lib/iris/tests/results/derived/no_orog.__str__.txt +++ b/lib/iris/tests/results/derived/no_orog.__str__.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/derived/removed_orog.__str__.txt b/lib/iris/tests/results/derived/removed_orog.__str__.txt index 0c24cded808..a9ef3bd0171 100644 --- a/lib/iris/tests/results/derived/removed_orog.__str__.txt +++ b/lib/iris/tests/results/derived/removed_orog.__str__.txt @@ -12,4 +12,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/derived/removed_sigma.__str__.txt b/lib/iris/tests/results/derived/removed_sigma.__str__.txt index 94e850ec629..c3eaf484833 100644 --- a/lib/iris/tests/results/derived/removed_sigma.__str__.txt +++ b/lib/iris/tests/results/derived/removed_sigma.__str__.txt @@ -12,4 +12,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 79560a53658..6a997c38b43 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -684,7 +684,10 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1.png" ], "iris.tests.test_plot.TestPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff99c067e01e7166101c9c6b04396b5cd4e2f0993163de9c4fe7b79207e36a1.png" + 
"https://scitools.github.io/test-iris-imagehash/images/v4/8fe896266f068d873b83cb71e435725cd07c607ad07e70fcd0007a7881fe7ab8.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896066f068d873b83cb71e435725cd07c607ad07c70fcd0007af881fe7bb8.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896366f0f8d93398bcb71e435f24ed074646ed07670acf010726d81f2798c.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/aff8946c7a14c99fb193d263e42432d8d00c2d27944a3f8dc5223ef703ff6b90.png" ], "iris.tests.test_plot.TestPlot.test_z.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" @@ -874,7 +877,10 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1.png" ], "iris.tests.test_plot.TestQuickplotPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3f9bc067e01c6166009c9c6b5439ee5cd4e0d2993361de9ccf65b79887636a9.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/a7ffb6067f008d87339bc973e435d86ef034c87ad07c586cd001da69897e5838.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/a7ffb6067f008d87339bc973e435d86ef034c87ad07cd86cd001da68897e58a8.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/a7efb6367f008d97338fc973e435d86ef030c86ed070d86cd030d86d89f0d82c.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10.png" ], "iris.tests.test_plot.TestQuickplotPlot.test_z.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" diff --git a/lib/iris/tests/results/netcdf/netcdf_merc.cml b/lib/iris/tests/results/netcdf/netcdf_merc.cml index 02fc4e7c345..5e17400158a 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc.cml @@ -53,15 +53,15 @@ 45.5158, 45.9993]]" shape="(192, 192)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="lon"/> - - + - - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_false.cml b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml new file mode 100644 index 00000000000..d916f5f7539 --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml index 701ca7beb7a..750d5974937 100644 --- a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml +++ b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml @@ -76,8 +76,25 @@ - + @@ -86,8 +103,26 @@ - + @@ -109,6 +144,6 @@ - + diff --git a/lib/iris/tests/results/util/as_compatible_shape_collapsed.cml b/lib/iris/tests/results/util/as_compatible_shape_collapsed.cml deleted file mode 100644 index 07eeb531574..00000000000 --- a/lib/iris/tests/results/util/as_compatible_shape_collapsed.cml +++ /dev/null @@ -1,144 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index e753adbae85..24f2b894426 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -249,7 +249,7 @@ def test_apply_ufunc(self): np.square, a, new_name="squared 
temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) self.assertCMLApproxData(a, ("analysis", "apply_ufunc_original.cml")) @@ -259,14 +259,14 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=True, ) self.assertCMLApproxData(b, ("analysis", "apply_ufunc.cml")) self.assertCMLApproxData(a, ("analysis", "apply_ufunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -295,7 +295,7 @@ def test_apply_ufunc_fail(self): def test_ifunc(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) b = my_ifunc(a, new_name="squared temperature", in_place=False) self.assertCMLApproxData(a, ("analysis", "apply_ifunc_original.cml")) @@ -307,7 +307,7 @@ def test_ifunc(self): self.assertCMLApproxData(a, ("analysis", "apply_ifunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -347,7 +347,7 @@ def test_ifunc_init_fail(self): def test_ifunc_call_fail(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) # should now NOT fail because giving 2 arguments to an ifunc that # expects only one will now ignore the surplus argument and raise @@ -367,7 +367,7 @@ def test_ifunc_call_fail(self): my_ifunc(a) my_ifunc = iris.analysis.maths.IFunc( - lambda a: (a, a ** 2.0), lambda cube: cf_units.Unit("1") + lambda a: (a, a**2.0), lambda cube: cf_units.Unit("1") ) # should fail because data function returns a tuple @@ -553,9 +553,9 @@ def test_square_root(self): a.data = abs(a.data) a.units **= 2 - e = a ** 0.5 + e = a**0.5 - self.assertArrayAllClose(e.data, a.data ** 0.5) + self.assertArrayAllClose(e.data, a.data**0.5) self.assertCML(e, ("analysis", "sqrt.cml"), checksum=False) self.assertRaises(ValueError, iris.analysis.maths.exponentiate, a, 0.3) @@ -585,26 +585,26 @@ def test_apply_ufunc(self): np.square, a, new_name="more_thingness", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) b = iris.analysis.maths.apply_ufunc(vec_mag_ufunc, a, c) - ans = a.data ** 2 + c.data ** 2 - b2 = b ** 2 + ans = a.data**2 + c.data**2 + b2 = b**2 self.assertArrayAlmostEqual(b2.data, ans) @@ -617,17 +617,17 @@ def test_ifunc(self): a = self.cube a.units = cf_units.Unit("meters") - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units**2) b = my_ifunc(a, new_name="more_thingness", in_place=False) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -637,12 +637,12 @@ def vec_mag(u, v): ) b = my_ifunc(a, c) - ans = (a.data ** 2 + c.data ** 2) ** 0.5 + ans = (a.data**2 + c.data**2) ** 0.5 self.assertArrayAlmostEqual(b.data, ans) def vec_mag_data_func(u_data, v_data): 
- return np.sqrt(u_data ** 2 + v_data ** 2) + return np.sqrt(u_data**2 + v_data**2) vec_mag_ifunc = iris.analysis.maths.IFunc( vec_mag_data_func, lambda a, b: (a + b).units diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 1b2ba563006..87270b524c3 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -178,7 +178,9 @@ def test_complex(self): @tests.skip_data class TestCoord_ReprStr_nontime(tests.IrisTest): def setUp(self): - self.lat = iris.tests.stock.realistic_4d().coord("grid_latitude")[:10] + cube = iris.tests.stock.realistic_4d() + self.lat = cube.coord("grid_latitude")[:10] + self.height = cube.coord("level_height")[:10] def test_DimCoord_repr(self): self.assertRepr( @@ -187,7 +189,7 @@ def test_DimCoord_repr(self): def test_AuxCoord_repr(self): self.assertRepr( - self.lat, ("coord_api", "str_repr", "aux_nontime_repr.txt") + self.height, ("coord_api", "str_repr", "aux_nontime_repr.txt") ) def test_DimCoord_str(self): @@ -197,14 +199,16 @@ def test_DimCoord_str(self): def test_AuxCoord_str(self): self.assertString( - str(self.lat), ("coord_api", "str_repr", "aux_nontime_str.txt") + str(self.height), ("coord_api", "str_repr", "aux_nontime_str.txt") ) @tests.skip_data class TestCoord_ReprStr_time(tests.IrisTest): def setUp(self): - self.time = iris.tests.stock.realistic_4d().coord("time") + cube = iris.tests.stock.realistic_4d() + self.time = cube.coord("time") + self.fp = cube.coord("forecast_period") def test_DimCoord_repr(self): self.assertRepr( @@ -213,7 +217,7 @@ def test_DimCoord_repr(self): def test_AuxCoord_repr(self): self.assertRepr( - self.time, ("coord_api", "str_repr", "aux_time_repr.txt") + self.fp, ("coord_api", "str_repr", "aux_time_repr.txt") ) def test_DimCoord_str(self): @@ -223,7 +227,7 @@ def test_DimCoord_str(self): def test_AuxCoord_str(self): self.assertString( - str(self.time), ("coord_api", "str_repr", "aux_time_str.txt") + str(self.fp), ("coord_api", "str_repr", "aux_time_str.txt") ) @@ -232,23 +236,29 @@ def test_basic(self): a = iris.coords.AuxCoord( np.arange(10), "air_temperature", units="kelvin" ) - result = ( - "AuxCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin'))" + result = "\n".join( + [ + "AuxCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) b = iris.coords.AuxCoord( list(range(10)), attributes={"monty": "python"} ) - result = ( - "AuxCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name=None," - " units=Unit('unknown')," - " attributes={'monty': 'python'})" + result = "\n".join( + [ + "AuxCoord : unknown / (unknown)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " attributes:", + " monty 'python'", + ] ) self.assertEqual(result, str(b)) @@ -276,12 +286,15 @@ def test_coord_system(self): units="kelvin", coord_system=iris.coord_systems.GeogCS(6000), ) - result = ( - "AuxCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin')," - " coord_system=GeogCS(6000.0))" + result = "\n".join( + [ + "AuxCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + " coord_system: GeogCS(6000.0)", + ] ) self.assertEqual(result, str(a)) @@ -292,14 +305,20 @@ def 
test_bounded(self): units="kelvin", bounds=np.arange(0, 20).reshape(10, 2), ) - result = ( - "AuxCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ", bounds=array([" - "[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n " - "[ 6, 7],\n [ 8, 9],\n [10, 11],\n " - "[12, 13],\n [14, 15],\n [16, 17],\n " - "[18, 19]])" - ", standard_name='air_temperature', units=Unit('kelvin'))" + result = "\n".join( + [ + "AuxCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " bounds: [", + " [ 0, 1],", + " [ 2, 3],", + " ...,", + " [16, 17],", + " [18, 19]]", + " shape: (10,) bounds(10, 2)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) @@ -322,23 +341,29 @@ def test_basic(self): a = iris.coords.DimCoord( np.arange(10), "air_temperature", units="kelvin" ) - result = ( - "DimCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin'))" + result = "\n".join( + [ + "DimCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) b = iris.coords.DimCoord( list(range(10)), attributes={"monty": "python"} ) - result = ( - "DimCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name=None," - " units=Unit('unknown')," - " attributes={'monty': 'python'})" + result = "\n".join( + [ + "DimCoord : unknown / (unknown)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " attributes:", + " monty 'python'", + ] ) self.assertEqual(result, str(b)) @@ -366,12 +391,15 @@ def test_coord_system(self): units="kelvin", coord_system=iris.coord_systems.GeogCS(6000), ) - result = ( - "DimCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin')," - " coord_system=GeogCS(6000.0))" + result = "\n".join( + [ + "DimCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + " coord_system: GeogCS(6000.0)", + ] ) self.assertEqual(result, str(a)) @@ -382,14 +410,20 @@ def test_bounded(self): units="kelvin", bounds=np.arange(0, 20).reshape(10, 2), ) - result = ( - "DimCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ", bounds=array([" - "[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n " - "[ 6, 7],\n [ 8, 9],\n [10, 11],\n " - "[12, 13],\n [14, 15],\n [16, 17],\n " - "[18, 19]])" - ", standard_name='air_temperature', units=Unit('kelvin'))" + result = "\n".join( + [ + "DimCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " bounds: [", + " [ 0, 1],", + " [ 2, 3],", + " ...,", + " [16, 17],", + " [18, 19]]", + " shape: (10,) bounds(10, 2)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 2c0569f0d66..42291259694 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -188,6 +188,29 @@ def test_as_cartopy_globe(self): self.assertEqual(res, expected) +class Test_GeogCS_as_cartopy_projection(tests.IrisTest): + def test_as_cartopy_projection(self): + geogcs_args = { + "semi_major_axis": 6543210, + "semi_minor_axis": 6500000, + "longitude_of_prime_meridian": 30, + } + cs = GeogCS(**geogcs_args) + res = cs.as_cartopy_projection() + + globe = ccrs.Globe( + semimajor_axis=geogcs_args["semi_major_axis"], + 
semiminor_axis=geogcs_args["semi_minor_axis"], + ellipse=None, + ) + expected = ccrs.PlateCarree( + globe=globe, + central_longitude=geogcs_args["longitude_of_prime_meridian"], + ) + + self.assertEqual(res, expected) + + class Test_GeogCS_as_cartopy_crs(tests.IrisTest): def test_as_cartopy_crs(self): cs = GeogCS(6543210, 6500000) diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index 2b5619c6568..3b751cfcbed 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -13,6 +13,7 @@ from io import StringIO import os +import pathlib import unittest import iris @@ -97,6 +98,23 @@ def test_filename(self): CHKSUM_ERR.format(self.ext), ) + def test_filename_path_object(self): + # Save using iris.save and pp.save, passing filename for + # iris.save as pathlib.Path + save_by_filename( + self.temp_filename1, + pathlib.Path(self.temp_filename2), + self.cube1, + pp.save, + ) + + # Compare files + self.assertEqual( + self.file_checksum(self.temp_filename2), + self.file_checksum(self.temp_filename1), + CHKSUM_ERR.format(self.ext), + ) + def test_filehandle(self): # Save using iris.save and pp.save save_by_filehandle( diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index e88eaabaed0..d33b76ddeb0 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -12,37 +12,58 @@ import iris.tests as tests # isort:skip from io import BytesIO +from pathlib import Path import iris.fileformats as iff import iris.io class TestDecodeUri(tests.IrisTest): - def test_decode_uri(self): + def test_decode_uri__str(self): tests = { - "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp": ( + (uri := "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp"): ( "file", - "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp", + uri, ), - r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp": ( + (uri := r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp"): ( "file", - r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp", + uri, ), - "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp": ( + ( + uri := "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp" + ): ( + uri[:4], + uri[5:], + ), + (uri := "http://www.somehost.com:8080/resource/thing.grib"): ( + uri[:4], + uri[5:], + ), + (uri := "/data/local/someDir/2013-11-25T13:49:17.632797"): ( "file", - "///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp", + uri, ), - "http://www.somehost.com:8080/resource/thing.grib": ( - "http", - "//www.somehost.com:8080/resource/thing.grib", + } + for uri, expected in tests.items(): + self.assertEqual(expected, iris.io.decode_uri(uri)) + + def test_decode_uri__path(self): + tests = { + (uri := "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp"): ( + "file", + uri, + ), + (uri := r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp"): ( + "file", + uri, ), - "/data/local/someDir/2013-11-25T13:49:17.632797": ( + (uri := "/data/local/someDir/2013-11-25T13:49:17.632797"): ( "file", - "/data/local/someDir/2013-11-25T13:49:17.632797", + uri, ), } - for uri, pair in tests.items(): - self.assertEqual(pair, iris.io.decode_uri(uri)) + for uri, expected in tests.items(): + self.assertEqual(expected, iris.io.decode_uri(Path(uri))) class TestFileFormatPicker(tests.IrisTest): diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 0674768a549..d21b40ee262 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -11,6 +11,11 @@ # import iris tests first so that some things can be initialised 
before importing anything else import iris.tests as tests # isort:skip +import pathlib +from unittest import mock + +import netCDF4 + import iris import iris.io @@ -22,6 +27,13 @@ def test_normal(self): cubes = iris.load(paths) self.assertEqual(len(cubes), 1) + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + cubes = iris.load(paths) + self.assertEqual(len(cubes), 1) + def test_nonexist(self): paths = ( tests.get_data_path(["PP", "aPPglob1", "global.pp"]), @@ -71,6 +83,12 @@ def test_normal(self): paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) _ = iris.load_cube(paths) + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + _ = iris.load_cube(paths) + def test_not_enough(self): paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) with self.assertRaises(iris.exceptions.ConstraintMismatchError): @@ -92,6 +110,13 @@ def test_normal(self): cubes = iris.load_cubes(paths) self.assertEqual(len(cubes), 1) + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + cubes = iris.load_cubes(paths) + self.assertEqual(len(cubes), 1) + def test_not_enough(self): paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) with self.assertRaises(iris.exceptions.ConstraintMismatchError): @@ -111,19 +136,35 @@ def test_too_many(self): iris.load_cube(paths) -class TestOpenDAP(tests.IrisTest): - def test_load(self): - # Check that calling iris.load_* with a http URI triggers a call to - # ``iris.io.load_http`` +@tests.skip_data +class TestLoadRaw(tests.IrisTest): + def test_normal(self): + paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) + cubes = iris.load_raw(paths) + self.assertEqual(len(cubes), 1) + + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + cubes = iris.load_raw(paths) + self.assertEqual(len(cubes), 1) + - url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" +class TestOPeNDAP(tests.IrisTest): + def setUp(self): + self.url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" + + def test_load_http_called(self): + # Check that calling iris.load_* with an http URI triggers a call to + # ``iris.io.load_http`` class LoadHTTPCalled(Exception): pass def new_load_http(passed_urls, *args, **kwargs): self.assertEqual(len(passed_urls), 1) - self.assertEqual(url, passed_urls[0]) + self.assertEqual(self.url, passed_urls[0]) raise LoadHTTPCalled() try: @@ -137,11 +178,28 @@ def new_load_http(passed_urls, *args, **kwargs): iris.load_cubes, ]: with self.assertRaises(LoadHTTPCalled): - fn(url) + fn(self.url) finally: iris.io.load_http = orig + def test_netCDF_Dataset_call(self): + # Check that load_http calls netCDF4.Dataset and supplies the expected URL. + + # To avoid making a request to an OPeNDAP server in a test, instead + # mock the call to netCDF4.Dataset so that it returns a dataset for a + # local file.
+ filename = tests.get_data_path( + ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") + ) + fake_dataset = netCDF4.Dataset(filename) + + with mock.patch( + "netCDF4.Dataset", return_value=fake_dataset + ) as dataset_loader: + next(iris.io.load_http([self.url], callback=None)) + dataset_loader.assert_called_with(self.url, mode="r") + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 06bedd497b5..a71385b5bcd 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -211,7 +211,10 @@ def test_grid(self): def test_default_projection_and_extent(self): self.assertEqual( - iplt.default_projection(self.cube), ccrs.PlateCarree() + iplt.default_projection(self.cube), + ccrs.PlateCarree( + globe=self.cube.coord_system("CoordSystem").as_cartopy_globe() + ), ) np_testing.assert_array_almost_equal( iplt.default_projection_extent(self.cube), diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 2c22c6d0886..8cdbe272574 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -218,6 +218,16 @@ def test_load_merc_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_merc.cml")) + def test_load_merc_false_en_grid(self): + # Test loading a single CF-netCDF file with a Mercator grid_mapping that + # includes false easting and northing + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "mercator", "false_east_north_merc.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_merc_false.cml")) + def test_load_stereographic_grid(self): # Test loading a single CF-netCDF file with a stereographic # grid_mapping. diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index cf921ae2104..ec7f8d10238 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -276,103 +276,5 @@ def test_output_file(self): self.assertFilesEqual(filename, "incompatible_cubes.str.txt") -@tests.skip_data -class TestAsCompatibleShape(tests.IrisTest): - def test_slice(self): - cube = tests.stock.realistic_4d() - sliced = cube[1, :, 2, :-2] - expected = cube[1:2, :, 2:3, :-2] - res = iris.util.as_compatible_shape(sliced, cube) - self.assertEqual(res, expected) - - def test_transpose(self): - cube = tests.stock.realistic_4d() - transposed = cube.copy() - transposed.transpose() - expected = cube - res = iris.util.as_compatible_shape(transposed, cube) - self.assertEqual(res, expected) - - def test_slice_and_transpose(self): - cube = tests.stock.realistic_4d() - sliced_and_transposed = cube[1, :, 2, :-2] - sliced_and_transposed.transpose() - expected = cube[1:2, :, 2:3, :-2] - res = iris.util.as_compatible_shape(sliced_and_transposed, cube) - self.assertEqual(res, expected) - - def test_collapsed(self): - cube = tests.stock.realistic_4d() - collapsed = cube.collapsed("model_level_number", iris.analysis.MEAN) - expected_shape = list(cube.shape) - expected_shape[1] = 1 - expected_data = collapsed.data.reshape(expected_shape) - res = iris.util.as_compatible_shape(collapsed, cube) - self.assertCML( - res, ("util", "as_compatible_shape_collapsed.cml"), checksum=False - ) - self.assertMaskedArrayEqual(expected_data, res.data) - - def test_reduce_dimensionality(self): - # Test that as_compatible_shape() can demote - # length one dimensions to scalars. 
- cube = tests.stock.realistic_4d() - src = cube[:, 2:3] - expected = reduced = cube[:, 2] - res = iris.util.as_compatible_shape(src, reduced) - self.assertEqual(res, expected) - - def test_anonymous_dims(self): - cube = tests.stock.realistic_4d() - # Move all coords from dim_coords to aux_coords. - for coord in cube.dim_coords: - dim = cube.coord_dims(coord) - cube.remove_coord(coord) - cube.add_aux_coord(coord, dim) - - sliced = cube[1, :, 2, :-2] - expected = cube[1:2, :, 2:3, :-2] - res = iris.util.as_compatible_shape(sliced, cube) - self.assertEqual(res, expected) - - def test_scalar_auxcoord(self): - def dim_to_aux(cube, coord_name): - """Convert coordinate on cube from DimCoord to AuxCoord.""" - coord = cube.coord(coord_name) - coord = iris.coords.AuxCoord.from_coord(coord) - cube.replace_coord(coord) - - cube = tests.stock.realistic_4d() - src = cube[:, :, 3] - dim_to_aux(src, "grid_latitude") - expected = cube[:, :, 3:4] - dim_to_aux(expected, "grid_latitude") - res = iris.util.as_compatible_shape(src, cube) - self.assertEqual(res, expected) - - def test_2d_auxcoord_transpose(self): - dim_coord1 = iris.coords.DimCoord(range(3), long_name="first_dim") - dim_coord2 = iris.coords.DimCoord(range(4), long_name="second_dim") - aux_coord_2d = iris.coords.AuxCoord( - np.arange(12).reshape(3, 4), long_name="spanning" - ) - aux_coord_2d_T = iris.coords.AuxCoord( - np.arange(12).reshape(3, 4).T, long_name="spanning" - ) - src = iris.cube.Cube( - np.ones((3, 4)), - dim_coords_and_dims=[(dim_coord1, 0), (dim_coord2, 1)], - aux_coords_and_dims=[(aux_coord_2d, (0, 1))], - ) - target = iris.cube.Cube( - np.ones((4, 3)), - dim_coords_and_dims=[(dim_coord1, 1), (dim_coord2, 0)], - aux_coords_and_dims=[(aux_coord_2d_T, (0, 1))], - ) - - res = iris.util.as_compatible_shape(src, target) - self.assertEqual(res[0], target[0]) - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index a44ccb32bda..ecaa028ab3d 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -16,11 +16,13 @@ import numpy as np from iris import load_cube -from iris.analysis._area_weighted import AreaWeightedRegridder +from iris.analysis._area_weighted import ( + AreaWeightedRegridder, + _regrid_area_weighted_rectilinear_src_and_grid__prepare, +) from iris.coord_systems import GeogCS from iris.coords import DimCoord from iris.cube import Cube -import iris.experimental.regrid as eregrid class Test(tests.IrisTest): @@ -46,19 +48,17 @@ def extract_grid(self, cube): def check_mdtol(self, mdtol=None): src_grid, target_grid = self.grids() # Get _regrid_info result - _regrid_info = ( - eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare( - src_grid, target_grid - ) + _regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare( + src_grid, target_grid ) self.assertEqual(len(_regrid_info), 10) with mock.patch( - "iris.experimental.regrid." + "iris.analysis._area_weighted." "_regrid_area_weighted_rectilinear_src_and_grid__prepare", return_value=_regrid_info, ) as prepare: with mock.patch( - "iris.experimental.regrid." + "iris.analysis._area_weighted." 
"_regrid_area_weighted_rectilinear_src_and_grid__perform", return_value=mock.sentinel.result, ) as perform: @@ -253,7 +253,6 @@ class TestLazy(tests.IrisTest): # Setup def setUp(self) -> None: # Prepare a cube and a template - cube_file_path = tests.get_data_path( ["NetCDF", "regrid", "regrid_xyt.nc"] ) diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index 4dfa1a4a2ec..8649cc55ea0 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -52,7 +52,7 @@ def setUp(self): 1, ) - self.tcs = iris.coord_systems.GeogCS(6000000) + self.tcs = iris.coord_systems.GeogCS(6371229) def test_is_iris_coord_system(self): res, _ = project(self.cube, self.tcs) diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py index e9294f27dc6..f5c882a9839 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py @@ -33,7 +33,7 @@ def _check_angles_calculation( u_cube.rename("dx") u_cube.data[...] = 0 v_cube = u_cube.copy() - v_cube.name("dy") + v_cube.rename("dy") # Define 6 different vectors, repeated in each data row. in_vu = np.array([(0, 1), (2, -1), (-1, -1), (-3, 1), (2, 0), (0, 0)]) @@ -71,7 +71,7 @@ def _check_angles_calculation( ang_diffs = out_angs - expect_angs # Fix for null vectors, and +/-360 differences. ang_diffs[np.abs(out_mags) < 0.001] = 0.0 - ang_diffs = ang_diffs % 360.0 + ang_diffs[np.isclose(np.abs(ang_diffs), 360.0)] = 0.0 # Check that any differences are very small. self.assertArrayAllClose(ang_diffs, 0.0) @@ -97,7 +97,7 @@ def test_angles_from_grid(self): u_cube.rename("dx") u_cube.data[...] = 1.0 v_cube = u_cube.copy() - v_cube.name("dy") + v_cube.rename("dy") v_cube.data[...] = 0.0 # Setup a fake angles result from the inner call to 'gridcell_angles'. diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index 9e3af90603e..eafaa20ec88 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -343,8 +343,8 @@ def test_orig_coords(self): def test_magnitude_preservation(self): u, v = self._uv_cubes_limited_extent() ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - orig_sq_mag = u.data ** 2 + v.data ** 2 - res_sq_mag = ut.data ** 2 + vt.data ** 2 + orig_sq_mag = u.data**2 + v.data**2 + res_sq_mag = ut.data**2 + vt.data**2 self.assertArrayAllClose(orig_sq_mag, res_sq_mag, rtol=5e-4) def test_data_values(self): @@ -437,9 +437,9 @@ def test_rotated_to_osgb(self): self.assertArrayEqual(expected_mask, vt.data.mask) # Check unmasked values have sufficiently small error in mag. - expected_mag = np.sqrt(u.data ** 2 + v.data ** 2) + expected_mag = np.sqrt(u.data**2 + v.data**2) # Use underlying data to ignore mask in calculation. - res_mag = np.sqrt(ut.data.data ** 2 + vt.data.data ** 2) + res_mag = np.sqrt(ut.data.data**2 + vt.data.data**2) # Calculate percentage error (note there are no zero magnitudes # so we can divide safely). 
anom = 100.0 * np.abs(res_mag - expected_mag) / expected_mag diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index f0dba837489..a018507fb35 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -33,7 +33,7 @@ def setUp(self): self.xs, self.ys = np.meshgrid(self.x.points, self.y.points) def transformation(x, y): - return x + y ** 2 + return x + y**2 # Construct a function which adds dimensions to the 2D data array # so that we can test higher dimensional functionality. diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 1fa579ef94d..88a88be5674 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -286,13 +286,15 @@ def test_class_cellmeasuremetadata(self): ) def test_class_connectivitymetadata(self): - self.args.update(dict(cf_role=None, start_index=None, src_dim=None)) + self.args.update( + dict(cf_role=None, start_index=None, location_axis=None) + ) metadata = ConnectivityMetadata(**self.args) self.item.metadata = metadata expected = metadata._asdict() del expected["cf_role"] del expected["start_index"] - del expected["src_dim"] + del expected["location_axis"] self.assertEqual(self.item._metadata_manager.values, expected) self.assertIsNot( self.item._metadata_manager.attributes, metadata.attributes diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py index b7c59ff5660..0c20f16f5a6 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py @@ -36,7 +36,7 @@ def test_vectorise_call(self): # The reason we use numpy.vectorize is to support multi-dimensional # coordinate points. def fn(coord, v): - return v ** 2 + return v**2 with mock.patch( "numpy.vectorize", return_value=self.vectorised diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py index 33efaef9dac..8a37a8fcc55 100644 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ b/lib/iris/tests/unit/coord_systems/test_Mercator.py @@ -29,7 +29,8 @@ def test_repr(self): "Mercator(longitude_of_projection_origin=90.0, " "ellipsoid=GeogCS(semi_major_axis=6377563.396, " "semi_minor_axis=6356256.909), " - "standard_parallel=0.0)" + "standard_parallel=0.0, " + "false_easting=0.0, false_northing=0.0)" ) self.assertEqual(expected, repr(self.tm)) @@ -38,16 +39,23 @@ class Test_init_defaults(tests.IrisTest): def test_set_optional_args(self): # Check that setting the optional (non-ellipse) args works. crs = Mercator( - longitude_of_projection_origin=27, standard_parallel=157.4 + longitude_of_projection_origin=27, + standard_parallel=157.4, + false_easting=13, + false_northing=12, ) self.assertEqualAndKind(crs.longitude_of_projection_origin, 27.0) self.assertEqualAndKind(crs.standard_parallel, 157.4) + self.assertEqualAndKind(crs.false_easting, 13.0) + self.assertEqualAndKind(crs.false_northing, 12.0) def _check_crs_defaults(self, crs): # Check for property defaults when no kwargs options were set. # NOTE: except ellipsoid, which is done elsewhere. 
self.assertEqualAndKind(crs.longitude_of_projection_origin, 0.0) self.assertEqualAndKind(crs.standard_parallel, 0.0) + self.assertEqualAndKind(crs.false_easting, 0.0) + self.assertEqualAndKind(crs.false_northing, 0.0) def test_no_optional_args(self): # Check expected defaults with no optional args. @@ -57,7 +65,10 @@ def test_no_optional_args(self): def test_optional_args_None(self): # Check expected defaults with optional args=None. crs = Mercator( - longitude_of_projection_origin=None, standard_parallel=None + longitude_of_projection_origin=None, + standard_parallel=None, + false_easting=None, + false_northing=None, ) self._check_crs_defaults(crs) @@ -77,6 +88,8 @@ def test_extra_kwargs(self): # converted to a cartopy CRS. longitude_of_projection_origin = 90.0 true_scale_lat = 14.0 + false_easting = 13 + false_northing = 12 ellipsoid = GeogCS( semi_major_axis=6377563.396, semi_minor_axis=6356256.909 ) @@ -85,6 +98,8 @@ def test_extra_kwargs(self): longitude_of_projection_origin, ellipsoid=ellipsoid, standard_parallel=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) expected = ccrs.Mercator( @@ -95,6 +110,8 @@ def test_extra_kwargs(self): ellipse=None, ), latitude_true_scale=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) res = merc_cs.as_cartopy_crs() @@ -113,6 +130,8 @@ def test_simple(self): def test_extra_kwargs(self): longitude_of_projection_origin = 90.0 true_scale_lat = 14.0 + false_easting = 13 + false_northing = 12 ellipsoid = GeogCS( semi_major_axis=6377563.396, semi_minor_axis=6356256.909 ) @@ -121,6 +140,8 @@ def test_extra_kwargs(self): longitude_of_projection_origin, ellipsoid=ellipsoid, standard_parallel=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) expected = ccrs.Mercator( @@ -131,6 +152,8 @@ def test_extra_kwargs(self): ellipse=None, ), latitude_true_scale=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) res = merc_cs.as_cartopy_projection() diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index e94ad0cf701..4d520ac4149 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -433,11 +433,18 @@ def test_non_time_values(self): units="m", attributes={"notes": "Measured from sea level"}, ) - expected = ( - "AncillaryVariable(array([2, 5, 9]), " - "standard_name='height', units=Unit('m'), " - "long_name='height of detector', var_name='height', " - "attributes={'notes': 'Measured from sea level'})" + expected = "\n".join( + [ + "AncillaryVariable : height / (m)", + " data: [2, 5, 9]", + " shape: (3,)", + " dtype: int64", + " standard_name: 'height'", + " long_name: 'height of detector'", + " var_name: 'height'", + " attributes:", + " notes 'Measured from sea level'", + ] ) self.assertEqual(expected, ancillary_var.__str__()) @@ -447,11 +454,20 @@ def test_time_values(self): units="hours since 1970-01-01 01:00", long_name="time of previous valid detection", ) - expected = ( - "AncillaryVariable([1970-01-01 03:00:00, " - "1970-01-01 06:00:00, 1970-01-01 10:00:00], " - "standard_name=None, calendar='gregorian', " - "long_name='time of previous valid detection')" + expected = "\n".join( + [ + ( + "AncillaryVariable : time of previous valid detection / " + "(hours since 1970-01-01 01:00, gregorian calendar)" + ), + ( + " data: [1970-01-01 03:00:00, 1970-01-01 06:00:00, " + "1970-01-01 10:00:00]" + ), + " 
shape: (3,)", + " dtype: int64", + " long_name: 'time of previous valid detection'", + ] ) self.assertEqual(expected, ancillary_var.__str__()) @@ -466,12 +482,7 @@ def test_non_time_values(self): units="m", attributes={"notes": "Measured from sea level"}, ) - expected = ( - "AncillaryVariable(array([2, 5, 9]), " - "standard_name='height', units=Unit('m'), " - "long_name='height of detector', var_name='height', " - "attributes={'notes': 'Measured from sea level'})" - ) + expected = "" self.assertEqual(expected, ancillary_var.__repr__()) def test_time_values(self): @@ -481,10 +492,8 @@ def test_time_values(self): long_name="time of previous valid detection", ) expected = ( - "AncillaryVariable(array([2, 5, 9]), standard_name=None, " - "units=Unit('hours since 1970-01-01 01:00', " - "calendar='gregorian'), " - "long_name='time of previous valid detection')" + "" ) self.assertEqual(expected, ancillary_var.__repr__()) diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index c5016e6c735..0bd66c6e980 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -93,30 +93,29 @@ def test_copy(self): copy_measure = self.measure.copy(new_vals) self.assertArrayEqual(copy_measure.data, new_vals) - def test_repr_other_metadata(self): - expected = ( - ", long_name='measured_area', " - "var_name='area', attributes={'notes': '1m accuracy'}" - ) - self.assertEqual(self.measure._repr_other_metadata(), expected) - def test___str__(self): - expected = ( - "CellMeasure(array([10., 12., 16., 9.]), " - "measure='area', standard_name='cell_area', " - "units=Unit('m^2'), long_name='measured_area', " - "var_name='area', attributes={'notes': '1m accuracy'})" + expected = "\n".join( + [ + "CellMeasure : cell_area / (m^2)", + " data: [10., 12., 16., 9.]", + " shape: (4,)", + " dtype: float64", + " standard_name: 'cell_area'", + " long_name: 'measured_area'", + " var_name: 'area'", + " attributes:", + " notes '1m accuracy'", + " measure: 'area'", + ] ) self.assertEqual(self.measure.__str__(), expected) def test___repr__(self): expected = ( - "CellMeasure(array([10., 12., 16., 9.]), " - "measure='area', standard_name='cell_area', " - "units=Unit('m^2'), long_name='measured_area', " - "var_name='area', attributes={'notes': '1m accuracy'})" + "" ) - self.assertEqual(self.measure.__repr__(), expected) + self.assertEqual(expected, self.measure.__repr__()) def test__eq__(self): self.assertEqual(self.measure, self.measure) diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 640dbcd1315..43170b6c4e6 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -883,9 +883,14 @@ def test_short_time_interval(self): coord = DimCoord( [5], standard_name="time", units="days since 1970-01-01" ) - expected = ( - "DimCoord([1970-01-06 00:00:00], standard_name='time', " - "calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (days since 1970-01-01, gregorian calendar)", + " points: [1970-01-06 00:00:00]", + " shape: (1,)", + " dtype: int64", + " standard_name: 'time'", + ] ) result = coord.__str__() self.assertEqual(expected, result) @@ -895,11 +900,17 @@ def test_short_time_interval__bounded(self): [5, 6], standard_name="time", units="days since 1970-01-01" ) coord.guess_bounds() - expected = ( - "DimCoord([1970-01-06 00:00:00, 1970-01-07 00:00:00], " - "bounds=[[1970-01-05 12:00:00, 1970-01-06 12:00:00],\n" - " 
[1970-01-06 12:00:00, 1970-01-07 12:00:00]], " - "standard_name='time', calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (days since 1970-01-01, gregorian calendar)", + " points: [1970-01-06 00:00:00, 1970-01-07 00:00:00]", + " bounds: [", + " [1970-01-05 12:00:00, 1970-01-06 12:00:00],", + " [1970-01-06 12:00:00, 1970-01-07 12:00:00]]", + " shape: (2,) bounds(2, 2)", + " dtype: int64", + " standard_name: 'time'", + ] ) result = coord.__str__() self.assertEqual(expected, result) @@ -908,7 +919,15 @@ def test_long_time_interval(self): coord = DimCoord( [5], standard_name="time", units="years since 1970-01-01" ) - expected = "DimCoord([5], standard_name='time', calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (years since 1970-01-01, gregorian calendar)", + " points: [5]", + " shape: (1,)", + " dtype: int64", + " standard_name: 'time'", + ] + ) result = coord.__str__() self.assertEqual(expected, result) @@ -917,16 +936,31 @@ def test_long_time_interval__bounded(self): [5, 6], standard_name="time", units="years since 1970-01-01" ) coord.guess_bounds() - expected = ( - "DimCoord([5 6], bounds=[[4.5 5.5]\n [5.5 6.5]], " - "standard_name='time', calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (years since 1970-01-01, gregorian calendar)", + " points: [5, 6]", + " bounds: [", + " [4.5, 5.5],", + " [5.5, 6.5]]", + " shape: (2,) bounds(2, 2)", + " dtype: int64", + " standard_name: 'time'", + ] ) result = coord.__str__() self.assertEqual(expected, result) def test_non_time_unit(self): coord = DimCoord([1.0]) - expected = repr(coord) + expected = "\n".join( + [ + "DimCoord : unknown / (unknown)", + " points: [1.]", + " shape: (1,)", + " dtype: float64", + ] + ) result = coord.__str__() self.assertEqual(expected, result) diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 82bd51a8aff..fd10a6f2643 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -9,7 +9,22 @@ # importing anything else. import iris.tests as tests # isort:skip -from iris.coords import _DimensionalMetadata + +from cf_units import Unit +import numpy as np + +import iris._lazy_data as lazy +from iris.coord_systems import GeogCS +from iris.coords import ( + AncillaryVariable, + AuxCoord, + CellMeasure, + DimCoord, + _DimensionalMetadata, +) +from iris.experimental.ugrid.mesh import Connectivity +from iris.tests.stock import climatology_3d as cube_with_climatology +from iris.tests.stock.mesh import sample_meshcoord class Test___init____abstractmethod(tests.IrisTest): @@ -22,5 +37,1046 @@ def test(self): _ = _DimensionalMetadata(0) +class Mixin__string_representations: + """ + Common testcode for generic `__str__`, `__repr__` and `summary` methods. + + Effectively, __str__ and __repr__ are thin wrappers around `summary`. + These are used by all the subclasses : notably Coord/DimCoord/AuxCoord, + but also AncillaryVariable, CellMeasure and MeshCoord. + + There are a lot of different aspects to consider: + + * different object classes with different class-specific properties + * changing with array sizes + dimensionalities + * masked data + * data types : int, float, string and (special) dates + * for Coords, handling of bounds + * "summary" controls (also can be affected by numpy printoptions). 
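+
+    For orientation, an illustrative repr/str pair for a simple AuxCoord
+    (illustrative only - the authoritative snapshots are embedded in the
+    individual tests below):
+
+        <AuxCoord: x / (m)  [0., 1., 2., 3., 4.]  shape(5,)>
+        AuxCoord : x / (m)
+            points: [0., 1., 2., 3., 4.]
+            shape: (5,)
+            dtype: float64
+            long_name: 'x'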
+ + NOTE: since the details of formatting are important to us here, the basic + test method is to check printout results against an exact 'snapshot' + embedded (visibly) in the test itself. + + """ + + def repr_str_strings(self, dm, linewidth=55): + """ + Return a simple combination of repr and str printouts. + + N.B. we control linewidth to make the outputs easier to compare. + """ + with np.printoptions(linewidth=linewidth): + result = repr(dm) + "\n" + str(dm) + return result + + def sample_data(self, datatype=float, units="m", shape=(5,), masked=False): + """Make a sample data array for a test _DimensionalMetadata object.""" + # Get an actual Unit + units = Unit(units) + if units.calendar: + # fix string datatypes for date-based units + datatype = float + + # Get a dtype + dtype = np.dtype(datatype) + + # Make suitable test values for type/shape/masked + length = int(np.prod(shape)) + if dtype.kind == "U": + # String content. + digit_strs = [str(i) * (i + 1) for i in range(0, 10)] + if length < 10: + # ['0', '11', '222, '3333', ..] + values = np.array(digit_strs[:length]) + else: + # [... '9999999999', '0', '11' ....] + indices = [(i % 10) for i in range(length)] + values = np.array(digit_strs)[indices] + else: + # numeric content : a simple [0, 1, 2 ...] + values = np.arange(length).astype(dtype) + + if masked: + if np.prod(shape) >= 3: + # Mask 1 in 3 points : [x -- x x -- x ...] + i_firstmasked = 1 + else: + # Few points, mask 1 in 3 starting at 0 [-- x x -- x x -- ...] + i_firstmasked = 0 + masked_points = [(i % 3) == i_firstmasked for i in range(length)] + values = np.ma.masked_array(values, mask=masked_points) + + values = values.reshape(shape) + return values + + # Make a sample Coord, as _DimensionalMetadata is abstract and this is the + # obvious concrete subclass to use for testing + def sample_coord( + self, + datatype=float, + dates=False, + units="m", + long_name="x", + shape=(5,), + masked=False, + bounded=False, + dimcoord=False, + lazy_points=False, + lazy_bounds=False, + *coord_args, + **coord_kwargs, + ): + if masked: + dimcoord = False + if dates: + # Use a pre-programmed date unit. + units = Unit("days since 1970-03-5") + if not isinstance(units, Unit): + # This operation is *not* a no-op, it will wipe calendars ! + units = Unit(units) + values = self.sample_data( + datatype=datatype, units=units, shape=shape, masked=masked + ) + cls = DimCoord if dimcoord else AuxCoord + coord = cls( + points=values, + units=units, + long_name=long_name, + *coord_args, + **coord_kwargs, + ) + if bounded or lazy_bounds: + if shape == (1,): + # Guess-bounds doesn't work ! + val = coord.points[0] + bounds = [val - 10, val + 10] + # NB preserve masked/unmasked : avoid converting masks to NaNs + if np.ma.isMaskedArray(coord.points): + array = np.ma.array + else: + array = np.array + coord.bounds = array(bounds) + else: + coord.guess_bounds() + if lazy_points: + coord.points = lazy.as_lazy_data(coord.points) + if lazy_bounds: + coord.bounds = lazy.as_lazy_data(coord.bounds) + return coord + + def coord_representations(self, *args, **kwargs): + """ + Create a test coord and return its string representations. + + Pass args+kwargs to 'sample_coord' and return the 'repr_str_strings'. + + """ + coord = self.sample_coord(*args, **kwargs) + return self.repr_str_strings(coord) + + def assertLines(self, list_of_expected_lines, string_result): + """ + Assert equality between a result and expected output lines. 
+ + For convenience, the 'expected lines' are joined with a '\\n', + because a list of strings is nicer to construct in code. + They should then match the actual result, which is a simple string. + + """ + self.assertEqual(list_of_expected_lines, string_result.split("\n")) + + +class Test__print_common(Mixin__string_representations, tests.IrisTest): + """ + Test aspects of __str__ and __repr__ output common to all + _DimensionalMetadata instances. + I.E. those from CFVariableMixin, plus values array (data-manager). + + Aspects : + * standard_name: + * long_name: + * var_name: + * attributes + * units + * shape + * dtype + + """ + + def test_simple(self): + result = self.coord_representations() + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_minimal(self): + result = self.coord_representations( + long_name=None, units=None, shape=(1,) + ) + expected = [ + "", + "AuxCoord : unknown / (unknown)", + " points: [0.]", + " shape: (1,)", + " dtype: float64", + ] + self.assertLines(expected, result) + + def test_names(self): + result = self.coord_representations( + standard_name="height", long_name="this", var_name="x_var" + ) + expected = [ + "", + "AuxCoord : height / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " standard_name: 'height'", + " long_name: 'this'", + " var_name: 'x_var'", + ] + self.assertLines(expected, result) + + def test_bounded(self): + result = self.coord_representations(shape=(3,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5],", + " [ 1.5, 2.5]]", + " shape: (3,) bounds(3, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_masked(self): + result = self.coord_representations(masked=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0.0, -- , 2.0, 3.0, -- ]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dtype_int(self): + result = self.coord_representations(units="1", datatype=np.int16) + expected = [ + "", + "AuxCoord : x / (1)", + " points: [0, 1, 2, 3, 4]", + " shape: (5,)", + " dtype: int16", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dtype_date(self): + # Note: test with a date 'longer' than the built-in one in + # 'sample_coord(dates=True)', because it includes a time-of-day + full_date_unit = Unit( + "days since 1892-05-17 03:00:25", calendar="360_day" + ) + result = self.coord_representations(units=full_date_unit) + expected = [ + ( + "" + ), + ( + "AuxCoord : x / (days since 1892-05-17 03:00:25, " + "360_day calendar)" + ), + " points: [", + " 1892-05-17 03:00:25, 1892-05-18 03:00:25,", + " 1892-05-19 03:00:25, 1892-05-20 03:00:25,", + " 1892-05-21 03:00:25]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_attributes(self): + # NOTE: scheduled for future change, to put each attribute on a line + coord = self.sample_coord( + attributes={ + "array": np.arange(7.0), + "list": [1, 2, 3], + "empty": [], + "None": None, + "string": "this", + "long_long_long_long_long_name": 3, + "other": ( + "long_long_long_long_long_long_long_long_" + "long_long_long_long_long_long_long_long_value" + ), + "float": 4.3, + } + ) + result = self.repr_str_strings(coord) + expected = [ + "", + 
"AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + " attributes:", + " array [0. 1. 2. 3. 4. 5. 6.]", + " list [1, 2, 3]", + " empty []", + " None None", + " string 'this'", + " long_long_long_long_long_name 3", + ( + " other " + "'long_long_long_long_long_long_long_long_" + "long_long_long_long_long_long..." + ), + " float 4.3", + ] + self.assertLines(expected, result) + + def test_lazy_points(self): + result = self.coord_representations(lazy_points=True) + expected = [ + " shape(5,)>", + "AuxCoord : x / (m)", + " points: ", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_lazy_bounds(self): + result = self.coord_representations(lazy_bounds=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " bounds: ", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_lazy_points_and_bounds(self): + result = self.coord_representations(lazy_points=True, lazy_bounds=True) + expected = [ + "+bounds shape(5,)>", + "AuxCoord : x / (m)", + " points: ", + " bounds: ", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_scalar(self): + result = self.coord_representations(shape=(1,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0.]", + " bounds: [[-10., 10.]]", + " shape: (1,) bounds(1, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_scalar_masked(self): + result = self.coord_representations( + shape=(1,), bounded=True, masked=True + ) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [--]", + " bounds: [[--, --]]", + " shape: (1,) bounds(1, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_length_short(self): + result = self.coord_representations(shape=(2,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5]]", + " shape: (2,) bounds(2, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_length_medium(self): + # Where bounds are truncated, but points not. 
+ result = self.coord_representations(shape=(14,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [", + " 0., 1., 2., 3., 4., 5., 6., 7., 8.,", + " 9., 10., 11., 12., 13.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5],", + " ...,", + " [11.5, 12.5],", + " [12.5, 13.5]]", + " shape: (14,) bounds(14, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_length_long(self): + # Completely truncated representations + result = self.coord_representations(shape=(150,), bounded=True) + expected = [ + ( + "" + ), + "AuxCoord : x / (m)", + " points: [ 0., 1., ..., 148., 149.]", + " bounds: [", + " [ -0.5, 0.5],", + " [ 0.5, 1.5],", + " ...,", + " [147.5, 148.5],", + " [148.5, 149.5]]", + " shape: (150,) bounds(150, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_strings(self): + result = self.coord_representations(datatype=str) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0 , 11 , 222 , 3333 , 44444]", + " shape: (5,)", + " dtype: ", + "AuxCoord : x / (m)", + " points: [", + " 0 , 11 , 222 ,", + " 3333 , 44444 , 555555 ,", + " 6666666 , 77777777 , 888888888 ,", + " 9999999999, 0 , 11 ,", + " 222 , 3333 , 44444 ]", + " shape: (15,)", + " dtype: ", + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [1970-03-05 00:00:00, 1970-03-06 00:00:00]", + " shape: (2,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dates_scalar(self): + # Printouts for a scalar date coord. + # Demonstrate that a "typical" datetime coord can print with the date + # value visible in the repr. + long_time_unit = Unit("hours since 2025-03-23 01:00:00") + coord = self.sample_coord( + standard_name="time", + long_name=None, + shape=(1,), + units=long_time_unit, + ) + # Do this one with a default linewidth, not our default reduced one, so + # that we can get the date value in the repr output. 
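+        # (linewidth=None leaves numpy's own default of 75 columns in
+        # force, as np.printoptions treats None-valued arguments as
+        # "unchanged".)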
+ result = self.repr_str_strings(coord, linewidth=None) + expected = [ + ( + "" + ), + ( + "AuxCoord : time / (hours since 2025-03-23 01:00:00, " + "gregorian calendar)" + ), + " points: [2025-03-23 01:00:00]", + " shape: (1,)", + " dtype: float64", + " standard_name: 'time'", + ] + self.assertLines(expected, result) + + def test_dates_bounds(self): + result = self.coord_representations(dates=True, bounded=True) + expected = [ + "", + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [", + " 1970-03-05 00:00:00, 1970-03-06 00:00:00,", + " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", + " 1970-03-09 00:00:00]", + " bounds: [", + " [1970-03-04 12:00:00, 1970-03-05 12:00:00],", + " [1970-03-05 12:00:00, 1970-03-06 12:00:00],", + " [1970-03-06 12:00:00, 1970-03-07 12:00:00],", + " [1970-03-07 12:00:00, 1970-03-08 12:00:00],", + " [1970-03-08 12:00:00, 1970-03-09 12:00:00]]", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dates_masked(self): + result = self.coord_representations(dates=True, masked=True) + expected = [ + "", + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [", + " 1970-03-05 00:00:00, -- ,", + " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", + " -- ]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_untypical_bounds(self): + # Check printing when n-bounds > 2 + coord = self.sample_coord() + bounds = coord.points.reshape((5, 1)) + np.array([[-3.0, -2, 2, 3]]) + coord.bounds = bounds + result = self.repr_str_strings(coord) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " bounds: [", + " [-3., -2., 2., 3.],", + " [-2., -1., 3., 4.],", + " ...,", + " [ 0., 1., 5., 6.],", + " [ 1., 2., 6., 7.]]", + " shape: (5,) bounds(5, 4)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_multidimensional(self): + # Demonstrate formatting of multdimensional arrays + result = self.coord_representations(shape=(7, 5, 3)) + # This one is a bit unavoidably long .. + expected = [ + "", + "AuxCoord : x / (m)", + " points: [", + " [[ 0., 1., 2.],", + " [ 3., 4., 5.],", + " ...,", + " [ 9., 10., 11.],", + " [ 12., 13., 14.]],", + " ", + " [[ 15., 16., 17.],", + " [ 18., 19., 20.],", + " ...,", + " [ 24., 25., 26.],", + " [ 27., 28., 29.]],", + " ", + " ...,", + " ", + " [[ 75., 76., 77.],", + " [ 78., 79., 80.],", + " ...,", + " [ 84., 85., 86.],", + " [ 87., 88., 89.]],", + " ", + " [[ 90., 91., 92.],", + " [ 93., 94., 95.],", + " ...,", + " [ 99., 100., 101.],", + " [102., 103., 104.]]]", + " shape: (7, 5, 3)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_multidimensional_small(self): + # Demonstrate that a small-enough multidim will print in the repr. 
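+        # (A (2, 2) array of small ints renders short enough for the full
+        # data content to appear inline in the one-line repr form.)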
+ result = self.coord_representations(shape=(2, 2), datatype=int) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [", + " [0, 1],", + " [2, 3]]", + " shape: (2, 2)", + " dtype: int64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_integers_short(self): + result = self.coord_representations(datatype=np.int16) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0, 1, 2, 3, 4]", + " shape: (5,)", + " dtype: int16", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_integers_masked(self): + result = self.coord_representations(datatype=int, masked=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0 , --, 2 , 3 , --]", + " shape: (5,)", + " dtype: int64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_integers_masked_long(self): + result = self.coord_representations( + shape=(20,), datatype=int, masked=True + ) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0 , --, ..., 18, --]", + " shape: (20,)", + " dtype: int64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + +class Test__print_Coord(Mixin__string_representations, tests.IrisTest): + """ + Test Coord-specific aspects of __str__ and __repr__ output. + + Aspects : + * DimCoord / AuxCoord + * coord_system + * climatological + * circular + + """ + + def test_dimcoord(self): + result = self.coord_representations(dimcoord=True) + expected = [ + "", + "DimCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_coord_system(self): + result = self.coord_representations(coord_system=GeogCS(1000.0)) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + " coord_system: GeogCS(1000.0)", + ] + self.assertLines(expected, result) + + def test_climatological(self): + cube = cube_with_climatology() + coord = cube.coord("time") + coord = coord[:1] # Just to make it a bit shorter + result = self.repr_str_strings(coord) + expected = [ + ( + "" + ), + ( + "DimCoord : time / (days since 1970-01-01 00:00:00-00, " + "gregorian calendar)" + ), + " points: [2001-01-10 00:00:00]", + " bounds: [[2001-01-10 00:00:00, 2011-01-10 00:00:00]]", + " shape: (1,) bounds(1, 2)", + " dtype: float64", + " standard_name: 'time'", + " climatological: True", + ] + self.assertLines(expected, result) + + def test_circular(self): + coord = self.sample_coord(shape=(2,), dimcoord=True) + coord.circular = True + result = self.repr_str_strings(coord) + expected = [ + "", + "DimCoord : x / (m)", + " points: [0., 1.]", + " shape: (2,)", + " dtype: float64", + " long_name: 'x'", + " circular: True", + ] + self.assertLines(expected, result) + + +class Test__print_noncoord(Mixin__string_representations, tests.IrisTest): + """ + Limited testing of other _DimensionalMetadata subclasses. + + * AncillaryVariable + * CellMeasure + * Connectivity + * MeshCoord + + """ + + def test_ancillary(self): + # Check we can print an AncillaryVariable + # Practically, ~identical to an AuxCoord, but without bounds, and the + # array is called 'data'. 
+ data = self.sample_data() + ancil = AncillaryVariable(data, long_name="v_aux", units="m s-1") + result = self.repr_str_strings(ancil) + expected = [ + "", + "AncillaryVariable : v_aux / (m s-1)", + " data: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'v_aux'", + ] + self.assertLines(expected, result) + + def test_cellmeasure(self): + # Check we can print an AncillaryVariable + # N.B. practically, identical to an AuxCoord (without bounds) + # Check we can print an AncillaryVariable + # Practically, ~identical to an AuxCoord, but without bounds, and the + # array is called 'data'. + data = self.sample_data() + cell_measure = CellMeasure( + data, measure="area", long_name="cell_area", units="m^2" + ) + result = self.repr_str_strings(cell_measure) + expected = [ + "", + "CellMeasure : cell_area / (m^2)", + " data: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'cell_area'", + " measure: 'area'", + ] + self.assertLines(expected, result) + + def test_connectivity(self): + # Check we can print a Connectivity + # Like a Coord, but always print : cf_role, location_axis, start_index + data = self.sample_data(shape=(3, 2), datatype=int) + conn = Connectivity( + data, cf_role="edge_node_connectivity", long_name="enc", units="1" + ) + result = self.repr_str_strings(conn) + expected = [ + "", + "Connectivity : enc / (1)", + " data: [", + " [0, 1],", + " [2, 3],", + " [4, 5]]", + " shape: (3, 2)", + " dtype: int64", + " long_name: 'enc'", + " cf_role: 'edge_node_connectivity'", + " start_index: 0", + " location_axis: 0", + ] + self.assertLines(expected, result) + + def test_connectivity__start_index(self): + # Check we can print a Connectivity + # Like a Coord, but always print : cf_role, location_axis, start_index + data = self.sample_data(shape=(3, 2), datatype=int) + conn = Connectivity( + data + 1, + start_index=1, + cf_role="edge_node_connectivity", + long_name="enc", + units="1", + ) + result = self.repr_str_strings(conn) + expected = [ + "", + "Connectivity : enc / (1)", + " data: [", + " [1, 2],", + " [3, 4],", + " [5, 6]]", + " shape: (3, 2)", + " dtype: int64", + " long_name: 'enc'", + " cf_role: 'edge_node_connectivity'", + " start_index: 1", + " location_axis: 0", + ] + self.assertLines(expected, result) + + def test_connectivity__location_axis(self): + # Check we can print a Connectivity + # Like a Coord, but always print : cf_role, location_axis, start_index + data = self.sample_data(shape=(3, 2), datatype=int) + conn = Connectivity( + data.transpose(), + location_axis=1, + cf_role="edge_node_connectivity", + long_name="enc", + units="1", + ) + result = self.repr_str_strings(conn) + expected = [ + "", + "Connectivity : enc / (1)", + " data: [", + " [0, 2, 4],", + " [1, 3, 5]]", + " shape: (2, 3)", + " dtype: int64", + " long_name: 'enc'", + " cf_role: 'edge_node_connectivity'", + " start_index: 0", + " location_axis: 1", + ] + self.assertLines(expected, result) + + def test_meshcoord(self): + meshco = sample_meshcoord() + meshco.mesh.long_name = "test_mesh" # For stable printout of the Mesh + result = self.repr_str_strings(meshco) + expected = [ + ( + "" + ), + "MeshCoord : longitude / (degrees_east)", + " mesh: ", + " location: 'face'", + " points: [3100, 3101, 3102]", + " bounds: [", + " [1100, 1101, 1102, 1103],", + " [1104, 1105, 1106, 1107],", + " [1108, 1109, 1110, 1111]]", + " shape: (3,) bounds(3, 4)", + " dtype: int64", + " standard_name: 'longitude'", + " long_name: 'long-name'", + " attributes:", + " a 1", + " b 
'c'", + " axis: 'x'", + ] + self.assertLines(expected, result) + + +class Test_summary(Mixin__string_representations, tests.IrisTest): + """ + Test the controls of the 'summary' method. + """ + + def test_shorten(self): + coord = self.sample_coord() + expected = self.repr_str_strings(coord) + result = coord.summary(shorten=True) + "\n" + coord.summary() + self.assertEqual(expected, result) + + def test_max_values__default(self): + coord = self.sample_coord() + result = coord.summary() + expected = [ + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_max_values__2(self): + coord = self.sample_coord() + result = coord.summary(max_values=2) + expected = [ + "AuxCoord : x / (m)", + " points: [0., 1., ..., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_max_values__bounded__2(self): + coord = self.sample_coord(bounded=True) + result = coord.summary(max_values=2) + expected = [ + "AuxCoord : x / (m)", + " points: [0., 1., ..., 3., 4.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5],", + " ...,", + " [ 2.5, 3.5],", + " [ 3.5, 4.5]]", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_max_values__0(self): + coord = self.sample_coord(bounded=True) + result = coord.summary(max_values=0) + expected = [ + "AuxCoord : x / (m)", + " points: [...]", + " bounds: [...]", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_linewidth__default(self): + coord = self.sample_coord() + coord.points = coord.points + 1000.003 # Make the output numbers wider + result = coord.summary() + expected = [ + "AuxCoord : x / (m)", + " points: [1000.003, 1001.003, 1002.003, 1003.003, 1004.003]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + # Show that, when unset, it follows the numpy setting + with np.printoptions(linewidth=35): + result = coord.summary() + expected = [ + "AuxCoord : x / (m)", + " points: [", + " 1000.003, 1001.003,", + " 1002.003, 1003.003,", + " 1004.003]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_linewidth__set(self): + coord = self.sample_coord() + coord.points = coord.points + 1000.003 # Make the output numbers wider + expected = [ + "AuxCoord : x / (m)", + " points: [", + " 1000.003, 1001.003,", + " 1002.003, 1003.003,", + " 1004.003]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + result = coord.summary(linewidth=35) + self.assertLines(expected, result) + + with np.printoptions(linewidth=999): + # Show that, when set, it ignores the numpy setting + result = coord.summary(linewidth=35) + self.assertLines(expected, result) + + def test_convert_dates(self): + coord = self.sample_coord(dates=True) + result = coord.summary() + expected = [ + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [", + ( + " 1970-03-05 00:00:00, 1970-03-06 00:00:00, " + "1970-03-07 00:00:00," + ), + " 1970-03-08 00:00:00, 1970-03-09 00:00:00]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + result = coord.summary(convert_dates=False) + expected = [ + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [0., 1., 2., 3., 4.]", + " 
shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 5d6f48fddaf..9a81c79d449 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -29,28 +29,30 @@ def setUp(self): "var_name": "face_nodes", "attributes": {"notes": "this is a test"}, "start_index": 1, - "src_dim": 1, + "location_axis": 1, } self.connectivity = Connectivity(**self.kwargs) def test_cf_role(self): self.assertEqual(self.kwargs["cf_role"], self.connectivity.cf_role) - def test_src_location(self): + def test_location(self): expected = self.kwargs["cf_role"].split("_")[0] - self.assertEqual(expected, self.connectivity.src_location) + self.assertEqual(expected, self.connectivity.location) - def test_tgt_location(self): + def test_connected(self): expected = self.kwargs["cf_role"].split("_")[1] - self.assertEqual(expected, self.connectivity.tgt_location) + self.assertEqual(expected, self.connectivity.connected) def test_start_index(self): self.assertEqual( self.kwargs["start_index"], self.connectivity.start_index ) - def test_src_dim(self): - self.assertEqual(self.kwargs["src_dim"], self.connectivity.src_dim) + def test_location_axis(self): + self.assertEqual( + self.kwargs["location_axis"], self.connectivity.location_axis + ) def test_indices(self): self.assertArrayEqual( @@ -58,7 +60,7 @@ def test_indices(self): ) def test_read_only(self): - attributes = ("indices", "cf_role", "start_index", "src_dim") + attributes = ("indices", "cf_role", "start_index", "location_axis") for attribute in attributes: self.assertRaisesRegex( AttributeError, @@ -70,10 +72,10 @@ def test_read_only(self): ) def test_transpose(self): - expected_dim = 1 - self.kwargs["src_dim"] + expected_dim = 1 - self.kwargs["location_axis"] expected_indices = self.kwargs["indices"].transpose() new_connectivity = self.connectivity.transpose() - self.assertEqual(expected_dim, new_connectivity.src_dim) + self.assertEqual(expected_dim, new_connectivity.location_axis) self.assertArrayEqual(expected_indices, new_connectivity.indices) def test_lazy_indices(self): @@ -87,39 +89,52 @@ def test_core_indices(self): def test_has_lazy_indices(self): self.assertFalse(self.connectivity.has_lazy_indices()) - def test_lazy_src_lengths(self): - self.assertTrue(is_lazy_data(self.connectivity.lazy_src_lengths())) + def test_lazy_location_lengths(self): + self.assertTrue( + is_lazy_data(self.connectivity.lazy_location_lengths()) + ) - def test_src_lengths(self): + def test_location_lengths(self): expected = [4, 4, 4] - self.assertArrayEqual(expected, self.connectivity.src_lengths()) + self.assertArrayEqual(expected, self.connectivity.location_lengths()) def test___str__(self): - expected = ( - "Connectivity(cf_role='face_node_connectivity', start_index=1)" + expected = "\n".join( + [ + "Connectivity : my_face_nodes / (unknown)", + " data: [", + " [ 1, 2, 3],", + " [ 4, 5, 6],", + " [ 7, 8, 9],", + " [10, 11, 12]]", + " shape: (4, 3)", + " dtype: int64", + " long_name: 'my_face_nodes'", + " var_name: 'face_nodes'", + " attributes:", + " notes 'this is a test'", + " cf_role: 'face_node_connectivity'", + " start_index: 1", + " location_axis: 1", + ] ) self.assertEqual(expected, self.connectivity.__str__()) def test___repr__(self): - expected = ( - 
"Connectivity(array([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9], [10, 11, 12]]), " - "cf_role='face_node_connectivity', long_name='my_face_nodes', " - "var_name='face_nodes', attributes={'notes': 'this is a test'}, " - "start_index=1, src_dim=1)" - ) + expected = "" self.assertEqual(expected, self.connectivity.__repr__()) def test_xml_element(self): doc = minidom.Document() connectivity_element = self.connectivity.xml_element(doc) self.assertEqual(connectivity_element.tagName, "connectivity") - for attribute in ("cf_role", "start_index", "src_dim"): + for attribute in ("cf_role", "start_index", "location_axis"): self.assertIn(attribute, connectivity_element.attributes) def test___eq__(self): equivalent_kwargs = self.kwargs equivalent_kwargs["indices"] = self.kwargs["indices"].transpose() - equivalent_kwargs["src_dim"] = 1 - self.kwargs["src_dim"] + equivalent_kwargs["location_axis"] = 1 - self.kwargs["location_axis"] equivalent = Connectivity(**equivalent_kwargs) self.assertFalse( np.array_equal(equivalent.indices, self.connectivity.indices) @@ -150,16 +165,18 @@ def test_copy(self): copy_connectivity = self.connectivity.copy(new_indices) self.assertArrayEqual(new_indices, copy_connectivity.indices) - def test_indices_by_src(self): + def test_indices_by_location(self): expected = self.kwargs["indices"].transpose() - self.assertArrayEqual(expected, self.connectivity.indices_by_src()) + self.assertArrayEqual( + expected, self.connectivity.indices_by_location() + ) - def test_indices_by_src_input(self): + def test_indices_by_location_input(self): expected = as_lazy_data(self.kwargs["indices"].transpose()) - by_src = self.connectivity.indices_by_src( + by_location = self.connectivity.indices_by_location( self.connectivity.lazy_indices() ) - self.assertArrayEqual(expected, by_src) + self.assertArrayEqual(expected, by_location) class TestAltIndices(tests.IrisTest): @@ -210,14 +227,14 @@ def test_start_index(self): ValueError, "Invalid start_index .", Connectivity, **kwargs ) - def test_src_dim(self): + def test_location_axis(self): kwargs = { "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), "cf_role": "face_node_connectivity", - "src_dim": 2, + "location_axis": 2, } self.assertRaisesRegex( - ValueError, "Invalid src_dim .", Connectivity, **kwargs + ValueError, "Invalid location_axis .", Connectivity, **kwargs ) def test_cf_role(self): @@ -275,7 +292,7 @@ def test_indices_locations_edge(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len=2", + "Not all edges meet requirement: len=2", Connectivity, **kwargs, ) @@ -287,7 +304,7 @@ def test_indices_locations_face(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=3", + "Not all faces meet requirement: len>=3", Connectivity, **kwargs, ) @@ -299,7 +316,7 @@ def test_indices_locations_volume_face(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=4", + "Not all volumes meet requirement: len>=4", Connectivity, **kwargs, ) @@ -311,7 +328,7 @@ def test_indices_locations_volume_edge(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=6", + "Not all volumes meet requirement: len>=6", Connectivity, **kwargs, ) @@ -321,11 +338,11 @@ def test_indices_locations_alt_dim(self): kwargs = { "indices": np.linspace(1, 9, 9, dtype=int).reshape((3, -1)), "cf_role": "volume_face_connectivity", - "src_dim": 1, + "location_axis": 1, } self.assertRaisesRegex( ValueError, - "Not all src_locations meet 
requirement: len>=4", + "Not all volumes meet requirement: len>=4", Connectivity, **kwargs, ) @@ -342,6 +359,10 @@ def test_indices_locations_masked(self): connectivity = Connectivity(**kwargs) self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=3", + "Not all faces meet requirement: len>=3", connectivity.validate_indices, ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 98086600167..f39f3706ee1 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -106,23 +106,33 @@ def test___getstate__(self): self.assertEqual(expected, self.mesh.__getstate__()) def test___repr__(self): - expected = ( - "Mesh(topology_dimension=1, node_coords_and_axes=[(AuxCoord(" - "array([0, 2, 1]), standard_name='longitude', units=Unit(" - "'unknown'), long_name='long_name', var_name='node_lon', " - "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='node_lat'), 'y')], connectivities=Connectivity(" - "cf_role='edge_node_connectivity', start_index=0), " - "edge_coords_and_axes=[(AuxCoord(array([1. , 1.5, 0.5]), " - "standard_name='longitude', units=Unit('unknown'), " - "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='edge_lat'), 'y')], long_name='my_topology_mesh', " - "var_name='mesh', attributes={'notes': 'this is a test'}, " - "node_dimension='NodeDim', edge_dimension='EdgeDim')" - ) - self.assertEqual(expected, self.mesh.__repr__()) + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___str__(self): + expected = [ + "Mesh : 'my_topology_mesh'", + " topology_dimension: 1", + " node", + " node_dimension: 'NodeDim'", + " node coordinates", + " ", + " ", + " edge", + " edge_dimension: 'EdgeDim'", + ( + " edge_node_connectivity: " + "" + ), + " edge coordinates", + " ", + " ", + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " attributes:", + " notes 'this is a test'", + ] + self.assertEqual(expected, str(self.mesh).split("\n")) def test___eq__(self): # The dimension names do not participate in equality. @@ -201,7 +211,7 @@ def test_connectivities(self): for kwargs in negative_kwargs: self.assertEqual([], func(**kwargs)) - def test_connectivities_locations(self): + def test_connectivities_elements(self): # topology_dimension-specific results. Method intended to be overridden. positive_kwargs = ( {"contains_node": True}, @@ -261,7 +271,7 @@ def test_coords(self): for kwargs in negative_kwargs: self.assertNotIn(self.NODE_LON, func(**kwargs)) - def test_coords_locations(self): + def test_coords_elements(self): # topology_dimension-specific results. Method intended to be overridden. 
all_expected = { "node_x": self.NODE_LON, @@ -373,33 +383,103 @@ def setUpClass(cls): cls.mesh = mesh.Mesh(**cls.kwargs) def test___repr__(self): - expected = ( - "Mesh(topology_dimension=2, node_coords_and_axes=[(AuxCoord(" - "array([0, 2, 1]), standard_name='longitude', units=Unit(" - "'unknown'), long_name='long_name', var_name='node_lon', " - "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='node_lat'), 'y')], connectivities=[Connectivity(" - "cf_role='face_node_connectivity', start_index=0), Connectivity(" - "cf_role='edge_node_connectivity', start_index=0), Connectivity(" - "cf_role='face_edge_connectivity', start_index=0), Connectivity(" - "cf_role='face_face_connectivity', start_index=0), Connectivity(" - "cf_role='edge_face_connectivity', start_index=0), Connectivity(" - "cf_role='boundary_node_connectivity', start_index=0)], " - "edge_coords_and_axes=[(AuxCoord(array([1. , 1.5, 0.5]), " - "standard_name='longitude', units=Unit('unknown'), " - "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='edge_lat'), 'y')], face_coords_and_axes=[(AuxCoord(" - "array([0.5]), standard_name='longitude', units=Unit('unknown'), " - "var_name='face_lon'), 'x'), (AuxCoord(array([0.5]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='face_lat'), 'y')], long_name='my_topology_mesh', " - "var_name='mesh', attributes={'notes': 'this is a test'}, " - "node_dimension='NodeDim', edge_dimension='EdgeDim', " - "face_dimension='FaceDim')" - ) - self.assertEqual(expected, self.mesh.__repr__()) + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___str__(self): + expected = [ + "Mesh : 'my_topology_mesh'", + " topology_dimension: 2", + " node", + " node_dimension: 'NodeDim'", + " node coordinates", + " ", + " ", + " edge", + " edge_dimension: 'EdgeDim'", + ( + " edge_node_connectivity: " + "" + ), + " edge coordinates", + " ", + " ", + " face", + " face_dimension: 'FaceDim'", + ( + " face_node_connectivity: " + "" + ), + " face coordinates", + " ", + " ", + " optional connectivities", + ( + " face_face_connectivity: " + "" + ), + ( + " face_edge_connectivity: " + "" + ), + ( + " edge_face_connectivity: " + "" + ), + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " attributes:", + " notes 'this is a test'", + ] + self.assertEqual(expected, str(self.mesh).split("\n")) + + # Test some different options of the str() operation here. 
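+    # E.g. removing the optional edge coordinates, as below, should drop
+    # the "edge coordinates" sub-section while keeping the edge section's
+    # edge_node_connectivity entry.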
+ def test___str__noedgecoords(self): + mesh_kwargs = self.kwargs.copy() + del mesh_kwargs["edge_coords_and_axes"] + alt_mesh = mesh.Mesh(**mesh_kwargs) + expected = [ + "Mesh : 'my_topology_mesh'", + " topology_dimension: 2", + " node", + " node_dimension: 'NodeDim'", + " node coordinates", + " ", + " ", + " edge", + " edge_dimension: 'EdgeDim'", + ( + " edge_node_connectivity: " + "" + ), + " face", + " face_dimension: 'FaceDim'", + ( + " face_node_connectivity: " + "" + ), + " face coordinates", + " ", + " ", + " optional connectivities", + ( + " face_face_connectivity: " + "" + ), + ( + " face_edge_connectivity: " + "" + ), + ( + " edge_face_connectivity: " + "" + ), + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " attributes:", + " notes 'this is a test'", + ] + self.assertEqual(expected, str(alt_mesh).split("\n")) def test_all_connectivities(self): expected = mesh.Mesh2DConnectivities( @@ -445,7 +525,7 @@ def test_connectivity(self): contains_face=False, ) - def test_connectivities_locations(self): + def test_connectivities_elements(self): kwargs_expected = ( ( {"contains_node": True}, @@ -501,7 +581,7 @@ def test_connectivities_locations(self): for item in expected: self.assertIn(item, result) - def test_coords_locations(self): + def test_coords_elements(self): all_expected = { "node_x": self.NODE_LON, "node_y": self.NODE_LAT, @@ -569,6 +649,93 @@ def test_face_node(self): self.assertEqual(self.FACE_NODE, self.mesh.face_node_connectivity) +class Test__str__various(TestMeshCommon): + # Some extra testing for the str() operation : based on 1D meshes as simpler + def setUp(self): + # All the tests here want modified meshes, so use standard setUp to + # create afresh for each test, allowing them to modify it. + super().setUp() + # Mesh kwargs with topology_dimension=1 and all applicable + # arguments populated - this tests correct property setting. + self.kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ), + "connectivities": [self.EDGE_NODE], + "long_name": "my_topology_mesh", + "var_name": "mesh", + "attributes": {"notes": "this is a test"}, + "node_dimension": "NodeDim", + "edge_dimension": "EdgeDim", + "edge_coords_and_axes": ( + (self.EDGE_LON, "x"), + (self.EDGE_LAT, "y"), + ), + } + self.mesh = mesh.Mesh(**self.kwargs) + + def test___repr__basic(self): + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___repr__varname(self): + self.mesh.long_name = None + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___repr__noname(self): + self.mesh.long_name = None + self.mesh.var_name = None + expected = "" + self.assertRegex(repr(self.mesh), expected) + + def test___str__noattributes(self): + self.mesh.attributes = None + self.assertNotIn("attributes", str(self.mesh)) + + def test___str__emptyattributes(self): + self.mesh.attributes.clear() + self.assertNotIn("attributes", str(self.mesh)) + + def test__str__longstringattribute(self): + self.mesh.attributes["long_string"] = ( + "long_x_10_long_x_20_long_x_30_long_x_40_" + "long_x_50_long_x_60_long_x_70_long_x_80_" + ) + result = str(self.mesh) + # Note: initial single-quote, but no final one : this is correct ! + expected = ( + "'long_x_10_long_x_20_long_x_30_long_x_40_" + "long_x_50_long_x_60_long_x_70..." + ) + self.assertIn(expected + ":END", result + ":END") + + def test___str__units_stdname(self): + # These are usually missing, but they *can* be present. 
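+        # (Mesh inherits from CFVariableMixin, so standard_name and units
+        # are settable like any other metadata, even though UGRID meshes
+        # rarely carry them.)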
+ mesh_kwargs = self.kwargs.copy() + mesh_kwargs["standard_name"] = "height" # Odd choice ! + mesh_kwargs["units"] = "m" + alt_mesh = mesh.Mesh(**mesh_kwargs) + result = str(alt_mesh) + # We expect these to appear at the end. + expected = "\n".join( + [ + " edge coordinates", + " ", + " ", + " standard_name: 'height'", + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " units: Unit('m')", + " attributes:", + " notes 'this is a test'", + ] + ) + self.assertTrue(result.endswith(expected)) + + class TestOperations1D(TestMeshCommon): # Tests that cannot re-use an existing Mesh instance, instead need a new # one each time. @@ -582,7 +749,7 @@ def setUp(self): @staticmethod def new_connectivity(connectivity, new_len=False): """Provide a new connectivity recognisably different from the original.""" - # NOTE: assumes non-transposed connectivity (src_dim=0). + # NOTE: assumes non-transposed connectivity (location_axis=0). if new_len: shape = (connectivity.shape[0] + 1, connectivity.shape[1]) else: @@ -1175,3 +1342,7 @@ def test_minimum_coords(self): mesh.Mesh, **kwargs, ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index 740258b77c0..ce99a8b4be5 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -11,12 +11,13 @@ # importing anything else. import iris.tests as tests # isort:skip +import re import unittest.mock as mock import dask.array as da import numpy as np -from iris._lazy_data import is_lazy_data +from iris._lazy_data import as_lazy_data, is_lazy_data from iris.common.metadata import BaseMetadata from iris.coords import AuxCoord, Coord from iris.cube import Cube @@ -60,7 +61,7 @@ def test_fail_bad_mesh(self): sample_meshcoord(mesh=mock.sentinel.odd) def test_valid_locations(self): - for loc in Mesh.LOCATIONS: + for loc in Mesh.ELEMENTS: meshcoord = sample_meshcoord(location=loc) self.assertEqual(meshcoord.location, loc) @@ -268,42 +269,117 @@ def setUp(self): def _expected_elements_regexp( self, - mesh_strstyle=True, - standard_name=True, - long_name=True, + standard_name="longitude", + long_name="long-name", attributes=True, + location="face", + axis="x", ): - regexp = r"^MeshCoord\(mesh=" - if mesh_strstyle: - regexp += r"Mesh\('test_mesh'\)" - else: - regexp += "" - regexp += r", location='face', axis='x', shape=\(3,\)" + # Printed name is standard or long -- we don't have a case with neither + coord_name = standard_name or long_name + # Construct regexp in 'sections' + # NB each consumes upto first non-space in the next line + regexp = f"MeshCoord : {coord_name} / [^\n]+\n *" + regexp += r"mesh: \\n *" + regexp += f"location: '{location}'\n *" + # Now some optional sections : whichever comes first will match + # arbitrary content leading up to it. 
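+        # (The flag below ensures ".*" is inserted only once, just before
+        # the first optional section actually expected - with re.DOTALL it
+        # then absorbs any intervening lines.)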
+ matched_any_upto = False if standard_name: - regexp += ", standard_name='longitude'" - regexp += r", units=Unit\('degrees_east'\)" + regexp += ".*" + matched_any_upto = True + regexp += f"standard_name: '{standard_name}'\n *" if long_name: - regexp += ", long_name='long-name'" + if not matched_any_upto: + regexp += ".*" + matched_any_upto = True + regexp += f"long_name: '{long_name}'\n *" if attributes: - regexp += r", attributes={'a': 1, 'b': 'c'}" - regexp += r"\)$" + # if we expected attributes, they should come next + # TODO: change this when each attribute goes on a new line + if not matched_any_upto: + regexp += ".*" + matched_any_upto = True + # match 'attributes:' followed by N*lines with larger indent + regexp += "attributes:(\n [^ \n]+ +[^ \n]+)+\n " + # After those items, expect 'axis' next + # N.B. this FAILS if we had attributes when we didn't expect them + regexp += f"axis: '{axis}'$" # N.B. this is always the end + + # Compile regexp, also allowing matches across newlines + regexp = re.compile(regexp, flags=re.DOTALL) return regexp def test_repr(self): + # A simple check for the condensed form. + result = repr(self.meshcoord) + expected = ( + "" + ) + self.assertEqual(expected, result) + + def test_repr_lazy(self): + # Displays lazy content (and does not realise!). + self.meshcoord.points = as_lazy_data(self.meshcoord.points) + self.meshcoord.bounds = as_lazy_data(self.meshcoord.bounds) + self.assertTrue(self.meshcoord.has_lazy_points()) + self.assertTrue(self.meshcoord.has_lazy_bounds()) + + result = repr(self.meshcoord) + self.assertTrue(self.meshcoord.has_lazy_points()) + self.assertTrue(self.meshcoord.has_lazy_bounds()) + + expected = ( + "+bounds shape(3,)>" + ) + self.assertEqual(expected, result) + + def test_repr__nameless_mesh(self): + # Check what it does when the Mesh doesn't have a name. + self.mesh.long_name = None + assert self.mesh.name() == "unknown" result = repr(self.meshcoord) - re_expected = self._expected_elements_regexp(mesh_strstyle=False) + re_expected = ( + r".MeshCoord: longitude / \(degrees_east\) " + r"mesh\(.Mesh object at 0x[^>]+.\) location\(face\) " + ) self.assertRegex(result, re_expected) def test__str__(self): + # Basic output contains mesh, location, standard_name, long_name, + # attributes, mesh, location and axis + result = str(self.meshcoord) + re_expected = self._expected_elements_regexp() + self.assertRegex(result, re_expected) + + def test__str__lazy(self): + # Displays lazy content (and does not realise!). 
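+        # (The has_lazy_* assertions made *after* the str() call confirm
+        # that printing did not force a dask compute.)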
+ self.meshcoord.points = as_lazy_data(self.meshcoord.points) + self.meshcoord.bounds = as_lazy_data(self.meshcoord.bounds) + result = str(self.meshcoord) - re_expected = self._expected_elements_regexp(mesh_strstyle=True) + self.assertTrue(self.meshcoord.has_lazy_points()) + self.assertTrue(self.meshcoord.has_lazy_bounds()) + + self.assertIn("points: ", result) + self.assertIn("bounds: ", result) + re_expected = self._expected_elements_regexp() self.assertRegex(result, re_expected) def test_alternative_location_and_axis(self): meshcoord = sample_meshcoord(mesh=self.mesh, location="edge", axis="y") result = str(meshcoord) - re_expected = r", location='edge', axis='y'" + re_expected = self._expected_elements_regexp( + standard_name="latitude", + long_name=None, + location="edge", + axis="y", + attributes=None, + ) self.assertRegex(result, re_expected) + # Basic output contains standard_name, long_name, attributes def test_str_no_long_name(self): mesh = self.mesh @@ -461,12 +537,12 @@ def _make_test_meshcoord( lazy_sources=False, location="face", inds_start_index=0, - inds_src_dim=0, + inds_location_axis=0, facenodes_changes=None, ): # Construct a miniature face-nodes mesh for testing. # NOTE: we will make our connectivity arrays with standard - # start_index=0 and src_dim=0 : We only adjust that (if required) when + # start_index=0 and location_axis=0 : We only adjust that (if required) when # creating the actual connectivities. face_nodes_array = np.array( [ @@ -551,26 +627,26 @@ def lazify(arr): inds_start_index + ( face_nodes_array.transpose() - if inds_src_dim == 1 + if inds_location_axis == 1 else face_nodes_array ), cf_role="face_node_connectivity", long_name="face_nodes", start_index=inds_start_index, - src_dim=inds_src_dim, + location_axis=inds_location_axis, ) edge_node_conn = Connectivity( inds_start_index + ( edge_nodes_array.transpose() - if inds_src_dim == 1 + if inds_location_axis == 1 else edge_nodes_array ), cf_role="edge_node_connectivity", long_name="edge_nodes", start_index=inds_start_index, - src_dim=inds_src_dim, + location_axis=inds_location_axis, ) self.mesh = Mesh( @@ -654,9 +730,9 @@ def test_edge_bounds(self): # NB simpler than faces : no possibility of missing points self.assertArrayAlmostEqual(result, expected) - def test_bounds_connectivity__src_dim_1(self): + def test_bounds_connectivity__location_axis_1(self): # Test with a transposed indices array. 
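+        # location_axis names the indices dimension that ranges over the
+        # mesh elements, e.g. for 4 faces of 3 nodes each:
+        #   location_axis=0  ->  indices.shape == (4, 3)
+        #   location_axis=1  ->  indices.shape == (3, 4)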
- self._make_test_meshcoord(inds_src_dim=1) + self._make_test_meshcoord(inds_location_axis=1) self._check_expected_bounds_values() def test_bounds_connectivity__start_index_1(self): diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index aee5018e5b3..edd34f94a1a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -217,7 +217,7 @@ def test_mixed_shapes(self): mesh = self.create() self.assertArrayEqual( - mesh.face_node_connectivity.src_lengths(), [4, 4, 3] + mesh.face_node_connectivity.location_lengths(), [4, 4, 3] ) self.assertEqual(mesh.node_coords.node_x.points[-1], 0.0) self.assertEqual(mesh.node_coords.node_y.points[-1], 0.0) diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index f119f53729f..af92e69b080 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -29,7 +29,7 @@ def setUp(self): self.attributes = mock.sentinel.attributes self.cf_role = mock.sentinel.cf_role self.start_index = mock.sentinel.start_index - self.src_dim = mock.sentinel.src_dim + self.location_axis = mock.sentinel.location_axis self.cls = ConnectivityMetadata def test_repr(self): @@ -41,12 +41,12 @@ def test_repr(self): attributes=self.attributes, cf_role=self.cf_role, start_index=self.start_index, - src_dim=self.src_dim, + location_axis=self.location_axis, ) fmt = ( "ConnectivityMetadata(standard_name={!r}, long_name={!r}, " "var_name={!r}, units={!r}, attributes={!r}, cf_role={!r}, " - "start_index={!r}, src_dim={!r})" + "start_index={!r}, location_axis={!r})" ) expected = fmt.format( self.standard_name, @@ -56,7 +56,7 @@ def test_repr(self): self.attributes, self.cf_role, self.start_index, - self.src_dim, + self.location_axis, ) self.assertEqual(expected, repr(metadata)) @@ -69,7 +69,7 @@ def test__fields(self): "attributes", "cf_role", "start_index", - "src_dim", + "location_axis", ) self.assertEqual(self.cls._fields, expected) @@ -87,14 +87,14 @@ def setUp(self): attributes=sentinel.attributes, cf_role=sentinel.cf_role, start_index=sentinel.start_index, - src_dim=sentinel.src_dim, + location_axis=sentinel.location_axis, ) self.dummy = sentinel.dummy self.cls = ConnectivityMetadata - # The "src_dim" member is stateful only, and does not participate in + # The "location_axis" member is stateful only, and does not participate in # lenient/strict equivalence. 
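+        # (The filtered member list below is what the lenient/strict
+        # comparison tests iterate over, instead of the full cls._members.)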
- self.members_no_src_dim = filter( - lambda member: member != "src_dim", self.cls._members + self.members_no_location_axis = filter( + lambda member: member != "location_axis", self.cls._members ) def test_wraps_docstring(self): @@ -140,7 +140,7 @@ def test_op_lenient_same_none(self): self.assertTrue(rmetadata.__eq__(lmetadata)) def test_op_lenient_same_members_none(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None @@ -152,10 +152,10 @@ def test_op_lenient_same_members_none(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_lenient_same_src_dim_none(self): + def test_op_lenient_same_location_axis_none(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = None + right["location_axis"] = None rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=True): @@ -173,7 +173,7 @@ def test_op_lenient_different(self): self.assertFalse(rmetadata.__eq__(lmetadata)) def test_op_lenient_different_members(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = self.dummy @@ -185,10 +185,10 @@ def test_op_lenient_different_members(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_lenient_different_src_dim(self): + def test_op_lenient_different_location_axis(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = self.dummy + right["location_axis"] = self.dummy rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=True): @@ -214,7 +214,7 @@ def test_op_strict_different(self): self.assertFalse(rmetadata.__eq__(lmetadata)) def test_op_strict_different_members(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = self.dummy @@ -226,10 +226,10 @@ def test_op_strict_different_members(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_strict_different_src_dim(self): + def test_op_strict_different_location_axis(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = self.dummy + right["location_axis"] = self.dummy rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=False): @@ -247,7 +247,7 @@ def test_op_strict_different_none(self): self.assertFalse(rmetadata.__eq__(lmetadata)) def test_op_strict_different_members_none(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None @@ -259,10 +259,10 @@ def test_op_strict_different_members_none(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_strict_different_src_dim_none(self): + def test_op_strict_different_location_axis_none(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = None + right["location_axis"] = None rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=False): @@ -311,7 +311,7 @@ def setUp(self): attributes=sentinel.attributes, 
cf_role=sentinel.cf_role, start_index=sentinel.start_index, - src_dim=sentinel.src_dim, + location_axis=sentinel.location_axis, ) self.dummy = sentinel.dummy self.cls = ConnectivityMetadata @@ -508,7 +508,7 @@ def setUp(self): attributes=sentinel.attributes, cf_role=sentinel.cf_role, start_index=sentinel.start_index, - src_dim=sentinel.src_dim, + location_axis=sentinel.location_axis, ) self.dummy = sentinel.dummy self.cls = ConnectivityMetadata diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index dfe2895f298..1b9857c0be0 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -28,7 +28,7 @@ def _engine(cf_grid_var, cf_name): class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid(self): + def test_valid_base(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], @@ -45,85 +45,50 @@ def test_valid(self): self.assertTrue(is_valid) - def test_invalid_scale_factor(self): - # Iris does not yet support scale factors other than one for - # Mercator projections + def test_valid_false_easting_northing(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], - longitude_of_projection_origin=0, - false_easting=0, - false_northing=0, - scale_factor_at_projection_origin=0.9, + longitude_of_projection_origin=-90, + false_easting=15, + false_northing=10, + scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Scale factor") + self.assertTrue(is_valid) - def test_invalid_standard_parallel(self): - # Iris does not yet support standard parallels other than zero for - # Mercator projections + def test_valid_standard_parallel(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], - longitude_of_projection_origin=0, + longitude_of_projection_origin=-90, false_easting=0, false_northing=0, - standard_parallel=30, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Standard parallel") - - def test_invalid_false_easting(self): - # Iris does not yet support false eastings other than zero for - # Mercator projections - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=0, - false_easting=100, - false_northing=0, - scale_factor_at_projection_origin=1, + standard_parallel=15, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - 
self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False easting") + self.assertTrue(is_valid) - def test_invalid_false_northing(self): - # Iris does not yet support false northings other than zero for + def test_invalid_scale_factor(self): + # Iris does not yet support scale factors other than one for # Mercator projections cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, - false_northing=100, - scale_factor_at_projection_origin=1, + false_northing=0, + scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) @@ -135,7 +100,7 @@ def test_invalid_false_northing(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False northing") + self.assertRegex(str(warns[0]), "Scale factor") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py index 87c2df7d45a..a914dd3314a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py @@ -184,7 +184,7 @@ def mesh_location_size(mesh, location): if conn is None: result = 0 else: - result = conn.shape[conn.src_dim] + result = conn.shape[conn.location_axis] return result @@ -705,7 +705,7 @@ def test_connectivity_dim_order(self): # Get the face-node and edge-node connectivities face_nodes_conn = mesh.face_node_connectivity edge_nodes_conn = mesh.edge_node_connectivity - # Transpose them : N.B. this sets src_dim=1, as it should be. + # Transpose them : N.B. this sets location_axis=1, as it should be. nodesfirst_faces_conn = face_nodes_conn.transpose() nodesfirst_edges_conn = edge_nodes_conn.transpose() # Make a new mesh with both face and edge connectivities 'transposed'. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index 0f2a8a2d4b9..c9c4821e0aa 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -65,7 +65,7 @@ def test_3d(self): def test_multiple_odd_dims(self): # Test to ensure multiple collapsed dimensions don't interfere. # make a 5-D array where dimensions 0, 2 and 3 are degenerate. - array = np.arange(3 ** 5).reshape([3] * 5) + array = np.arange(3**5).reshape([3] * 5) array[1:] = array[0:1] array[:, :, 1:] = array[:, :, 0:1] array[:, :, :, 1:] = array[:, :, :, 0:1] diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index d975884cb0e..2aae32b1ae8 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -13,8 +13,6 @@ # importing anything else. 
 import iris.tests as tests  # isort:skip

-import unittest
-
 from cf_units import CALENDAR_360_DAY, CALENDAR_GREGORIAN, Unit
 from cftime import datetime as nc_datetime
 import numpy as np
@@ -733,7 +731,6 @@ def test_t1_list_t2_scalar(self):


 class TestArrayInputWithLBTIM_0_3_1(TestField):
-    @unittest.skip("#3508 investigate unit test failure")
     def test_t1_scalar_t2_list(self):
         lbtim = _lbtim(ib=3, ic=1)
         lbcode = _lbcode(1)
@@ -756,9 +753,13 @@ def test_t1_scalar_t2_list(self):
         )

         # Expected coords.
+        leap_year_adjust = np.array([0, 24, 24])
         points = np.ones_like(years) * lbft
         bounds = np.array(
-            [lbft - ((years - 1970) * 365 * 24 + 2 * 24), points]
+            [
+                lbft - ((years - 1970) * 365 * 24 + 2 * 24 + leap_year_adjust),
+                points,
+            ]
         ).transpose()
         fp_coord = AuxCoord(
             points,
@@ -766,7 +767,7 @@ def test_t1_scalar_t2_list(self):
             units="hours",
             bounds=bounds,
         )
-        points = (years - 1970) * 365 * 24 + 10 * 24 + 9
+        points = (years - 1970) * 365 * 24 + 10 * 24 + 9 + leap_year_adjust
         bounds = np.array(
             [np.ones_like(points) * (8 * 24 + 9), points]
         ).transpose()
diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py b/lib/iris/tests/unit/io/test__generate_cubes.py
new file mode 100755
index 00000000000..3a896a111c4
--- /dev/null
+++ b/lib/iris/tests/unit/io/test__generate_cubes.py
@@ -0,0 +1,37 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for the `iris._generate_cubes` function."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests  # isort:skip
+
+from pathlib import Path
+
+import iris
+
+
+class TestGenerateCubes(tests.IrisTest):
+    def test_pathlib_paths(self):
+        test_variants = [
+            ("string", "string"),
+            (["string"], "string"),
+            (Path("string"), Path("string")),
+        ]
+
+        decode_uri_mock = self.patch(
+            "iris.iris.io.decode_uri", return_value=("file", None)
+        )
+        self.patch("iris.iris.io.load_files")
+
+        for gc_arg, du_arg in test_variants:
+            decode_uri_mock.reset_mock()
+            list(iris._generate_cubes(gc_arg, None, None))
+            decode_uri_mock.assert_called_with(du_arg)
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py
new file mode 100755
index 00000000000..b92e26f2d12
--- /dev/null
+++ b/lib/iris/tests/unit/io/test_save.py
@@ -0,0 +1,45 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for the `iris.io.save` function."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
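
The `test_pathlib_paths` expectations in `test__generate_cubes.py` above imply a simple normalisation inside `_generate_cubes`: a lone string or `Path` is wrapped into a single-element collection, an existing list is iterated as-is, and each entry is then handed to `iris.io.decode_uri` unchanged. A minimal sketch of that dispatch (hypothetical helper name; the real function also threads through constraints and callbacks):

    from pathlib import Path

    def _normalise_uris(uris):
        # A single source may arrive as a str or a pathlib.Path;
        # anything else is assumed to be an iterable of sources.
        if isinstance(uris, (str, Path)):
            uris = [uris]
        return uris

    assert _normalise_uris("a.nc") == ["a.nc"]
    assert _normalise_uris(["a.nc"]) == ["a.nc"]
    assert _normalise_uris(Path("a.nc")) == [Path("a.nc")]
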
+import iris.tests as tests  # isort:skip
+
+from pathlib import Path
+from unittest import mock
+
+import iris
+from iris.cube import Cube
+
+
+class TestSave(tests.IrisTest):
+    def test_pathlib_save(self):
+        file_mock = mock.Mock()
+        # Have to configure after creation because "name" is special
+        file_mock.configure_mock(name="string")
+
+        find_saver_mock = self.patch(
+            "iris.io.find_saver", return_value=(lambda *args, **kwargs: None)
+        )
+
+        test_variants = [
+            ("string", "string"),
+            (Path("string/string"), "string/string"),
+            (file_mock, "string"),
+        ]
+
+        for target, fs_val in test_variants:
+            try:
+                iris.save(Cube([]), target)
+            except ValueError:
+                # Saving an empty cube fails; only the find_saver call
+                # is under test here.
+                pass
+            find_saver_mock.assert_called_with(fs_val)
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py
index 157780dcae7..1ad5c876919 100644
--- a/lib/iris/tests/unit/plot/test__fixup_dates.py
+++ b/lib/iris/tests/unit/plot/test__fixup_dates.py
@@ -23,6 +23,7 @@ def test_gregorian_calendar(self):
         unit = Unit("hours since 2000-04-13 00:00:00", calendar="gregorian")
         coord = AuxCoord([1, 3, 6], "time", units=unit)
         result = _fixup_dates(coord, coord.points)
+        self.assertIsInstance(result[0], datetime.datetime)
         expected = [
             datetime.datetime(2000, 4, 13, 1),
             datetime.datetime(2000, 4, 13, 3),
@@ -34,6 +35,7 @@ def test_gregorian_calendar_sub_second(self):
         unit = Unit("seconds since 2000-04-13 00:00:00", calendar="gregorian")
         coord = AuxCoord([1, 1.25, 1.5], "time", units=unit)
         result = _fixup_dates(coord, coord.points)
+        self.assertIsInstance(result[0], datetime.datetime)
         expected = [
             datetime.datetime(2000, 4, 13, 0, 0, 1),
             datetime.datetime(2000, 4, 13, 0, 0, 1),
@@ -52,9 +54,7 @@ def test_360_day_calendar(self):
             cftime.datetime(2000, 2, 29, calendar=calendar),
             cftime.datetime(2000, 2, 30, calendar=calendar),
         ]
-        self.assertArrayEqual(
-            [cdt.datetime for cdt in result], expected_datetimes
-        )
+        self.assertArrayEqual(result, expected_datetimes)

     @tests.skip_nc_time_axis
     def test_365_day_calendar(self):
@@ -67,9 +67,7 @@ def test_365_day_calendar(self):
             cftime.datetime(2000, 2, 25, 1, 0, calendar=calendar),
             cftime.datetime(2000, 2, 25, 2, 30, calendar=calendar),
         ]
-        self.assertArrayEqual(
-            [cdt.datetime for cdt in result], expected_datetimes
-        )
+        self.assertArrayEqual(result, expected_datetimes)

     @tests.skip_nc_time_axis
     def test_360_day_calendar_attribute(self):
diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py
index 8370c719f03..40a932b9e0c 100644
--- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py
+++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py
@@ -142,7 +142,7 @@ def test_columns_long_attribute(self):
             "    Attributes:",
             (
                 "        very_very_very_very_very_long_name  "
-                "longish string extends beyond dim columns"
+                "'longish string extends beyond dim columns'"
             ),
         ]
         self.assertEqual(rep, expected)
@@ -442,7 +442,7 @@ def test_section_cube_attributes(self):
             "    Attributes:",
             "        list                                  [3]",
             "        number                                1.2",
-            "        string                                four five in a string",
+            "        string                                'four five in a string'",
             "        z_tupular                             (6, (7, 8))",
         ]
         self.assertEqual(rep, expected)
@@ -464,7 +464,7 @@ def test_section_cube_attributes__string_extras(self):
             "    Attributes:",
             "        escaped                               'escaped\\tstring'",
             (
-                "        long                                  this is very very very "
+                "        long                                  'this is very very very "
                 "very very very very very very very very very very very very..."
), ( diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index c8af3437e66..3e411c020dd 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -183,7 +183,11 @@ def test_attributes(self): attribute_section = rep.scalar_sections["Attributes:"] attribute_contents = attribute_section.contents - expected_contents = ["a: 1", "b: two", "c: ' this \\n that\\tand.'"] + expected_contents = [ + "a: 1", + "b: 'two'", + "c: ' this \\n that\\tand.'", + ] # Note: a string with \n or \t in it gets "repr-d". # Other strings don't (though in coord 'extra' lines, they do.) diff --git a/lib/iris/util.py b/lib/iris/util.py index 9ab413a493b..53cd78724e2 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -10,8 +10,6 @@ from abc import ABCMeta, abstractmethod from collections.abc import Hashable, Iterable -from contextlib import contextmanager -import copy import functools import inspect import os @@ -396,10 +394,24 @@ def normalise_array(array): def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): """ - Returns whether two numbers are almost equal, allowing for the - finite precision of floating point numbers. + Returns whether two numbers are almost equal, allowing for the finite + precision of floating point numbers. + + .. deprecated:: 3.2.0 + + Instead use :func:`math.isclose`. For example, rather than calling + ``approx_equal(a, b, max_abs, max_rel)`` replace with ``math.isclose(a, + b, max_rel, max_abs)``. Note that :func:`~math.isclose` will return True + if the actual error equals the maximum, whereas :func:`util.approx_equal` + will return False. """ + wmsg = ( + "iris.util.approx_equal has been deprecated and will be removed, " + "please use math.isclose instead." + ) + warn_deprecated(wmsg) + # Deal with numbers close to zero if abs(a - b) < max_absolute_error: return True @@ -1054,18 +1066,20 @@ def format_array(arr): """ - summary_insert = "" summary_threshold = 85 + summary_insert = "..." if arr.size > summary_threshold else "" edge_items = 3 ffunc = str - formatArray = np.core.arrayprint._formatArray max_line_len = 50 - legacy = "1.13" - if arr.size > summary_threshold: - summary_insert = "..." - options = np.get_printoptions() - options["legacy"] = legacy - with _printopts_context(**options): + + # Format the array with version 1.13 legacy behaviour + with np.printoptions(legacy="1.13"): + # Use this (private) routine for more control. + formatArray = np.core.arrayprint._formatArray + # N.B. the 'legacy' arg had different forms in different numpy versions + # -- fetch the required form from the internal options dict + format_options_legacy = np.core.arrayprint._format_options["legacy"] + result = formatArray( arr, ffunc, @@ -1074,29 +1088,12 @@ def format_array(arr): separator=", ", edge_items=edge_items, summary_insert=summary_insert, - legacy=legacy, + legacy=format_options_legacy, ) return result -@contextmanager -def _printopts_context(**kwargs): - """ - Update the numpy printoptions for the life of this context manager. 
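
The `approx_equal` deprecation above swaps argument order as well as boundary behaviour, so a concrete check helps - a minimal sketch using values that are exactly representable in binary floating point:

    import math

    # approx_equal(a, b, max_absolute_error, max_relative_error) becomes
    # math.isclose(a, b, rel_tol=..., abs_tol=...): the tolerances swap order.
    a, b = 1.0, 1.25  # |a - b| == 0.25 exactly

    # isclose counts an error *equal* to the tolerance as a match...
    assert math.isclose(a, b, rel_tol=0.0, abs_tol=0.25)
    # ...whereas the deprecated approx_equal used strict '<' comparisons and
    # returned False for the same inputs, as its docstring notes.
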
- - Note: this function can be removed with numpy>=1.15 thanks to - https://github.com/numpy/numpy/pull/10406 - - """ - original_opts = np.get_printoptions() - np.set_printoptions(**kwargs) - try: - yield - finally: - np.set_printoptions(**original_opts) - - def new_axis(src_cube, scalar_coord=None): """ Create a new axis as the leading dimension of the cube, promoting a scalar @@ -1169,133 +1166,6 @@ def new_axis(src_cube, scalar_coord=None): return new_cube -def as_compatible_shape(src_cube, target_cube): - """ - Return a cube with added length one dimensions to match the dimensionality - and dimension ordering of `target_cube`. - - This function can be used to add the dimensions that have been collapsed, - aggregated or sliced out, promoting scalar coordinates to length one - dimension coordinates where necessary. It operates by matching coordinate - metadata to infer the dimensions that need modifying, so the provided - cubes must have coordinates with the same metadata - (see :class:`iris.common.CoordMetadata`). - - .. note:: This function will load and copy the data payload of `src_cube`. - - .. deprecated:: 3.0.0 - - Instead use :class:`~iris.common.resolve.Resolve`. For example, rather - than calling ``as_compatible_shape(src_cube, target_cube)`` replace - with ``Resolve(src_cube, target_cube)(target_cube.core_data())``. - - Args: - - * src_cube: - An instance of :class:`iris.cube.Cube` with missing dimensions. - - * target_cube: - An instance of :class:`iris.cube.Cube` with the desired dimensionality. - - Returns: - A instance of :class:`iris.cube.Cube` with the same dimensionality as - `target_cube` but with the data and coordinates from `src_cube` - suitably reshaped to fit. - - """ - from iris.cube import Cube - - wmsg = ( - "iris.util.as_compatible_shape has been deprecated and will be " - "removed, please use iris.common.resolve.Resolve instead." - ) - warn_deprecated(wmsg) - - dim_mapping = {} - for coord in target_cube.aux_coords + target_cube.dim_coords: - dims = target_cube.coord_dims(coord) - try: - collapsed_dims = src_cube.coord_dims(coord) - except iris.exceptions.CoordinateNotFoundError: - continue - if collapsed_dims: - if len(collapsed_dims) == len(dims): - for dim_from, dim_to in zip(dims, collapsed_dims): - dim_mapping[dim_from] = dim_to - elif dims: - for dim_from in dims: - dim_mapping[dim_from] = None - - if len(dim_mapping) != target_cube.ndim: - raise ValueError( - "Insufficient or conflicting coordinate " - "metadata. Cannot infer dimension mapping " - "to restore cube dimensions." - ) - - new_shape = [1] * target_cube.ndim - for dim_from, dim_to in dim_mapping.items(): - if dim_to is not None: - new_shape[dim_from] = src_cube.shape[dim_to] - - new_data = src_cube.data.copy() - - # Transpose the data (if necessary) to prevent assignment of - # new_shape doing anything except adding length one dims. - order = [v for k, v in sorted(dim_mapping.items()) if v is not None] - if order != sorted(order): - new_order = [order.index(i) for i in range(len(order))] - new_data = np.transpose(new_data, new_order).copy() - - new_cube = Cube(new_data.reshape(new_shape)) - new_cube.metadata = copy.deepcopy(src_cube.metadata) - - # Record a mapping from old coordinate IDs to new coordinates, - # for subsequent use in creating updated aux_factories. 
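
The deprecation notice on the removed `as_compatible_shape` above already names its replacement; spelled out as runnable code, the documented pattern is (function name and cube arguments are placeholders):

    from iris.common.resolve import Resolve

    def compatible_shape(src_cube, target_cube):
        # Resolve maps src_cube onto target_cube's dimensionality; calling
        # the resolved object then broadcasts the supplied data to match.
        return Resolve(src_cube, target_cube)(target_cube.core_data())
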
- coord_mapping = {} - - reverse_mapping = {v: k for k, v in dim_mapping.items() if v is not None} - - def add_coord(coord): - """Closure used to add a suitably reshaped coord to new_cube.""" - all_dims = target_cube.coord_dims(coord) - src_dims = [ - dim - for dim in src_cube.coord_dims(coord) - if src_cube.shape[dim] > 1 - ] - mapped_dims = [reverse_mapping[dim] for dim in src_dims] - length1_dims = [dim for dim in all_dims if new_cube.shape[dim] == 1] - dims = length1_dims + mapped_dims - shape = [new_cube.shape[dim] for dim in dims] - if not shape: - shape = [1] - points = coord.points.reshape(shape) - bounds = None - if coord.has_bounds(): - bounds = coord.bounds.reshape(shape + [coord.nbounds]) - new_coord = coord.copy(points=points, bounds=bounds) - # If originally in dim_coords, add to dim_coords, otherwise add to - # aux_coords. - if target_cube.coords(coord, dim_coords=True): - try: - new_cube.add_dim_coord(new_coord, dims) - except ValueError: - # Catch cases where the coord is an AuxCoord and therefore - # cannot be added to dim_coords. - new_cube.add_aux_coord(new_coord, dims) - else: - new_cube.add_aux_coord(new_coord, dims) - coord_mapping[id(coord)] = new_coord - - for coord in src_cube.aux_coords + src_cube.dim_coords: - add_coord(coord) - for factory in src_cube.aux_factories: - new_cube.add_aux_factory(factory.updated(coord_mapping)) - - return new_cube - - def squeeze(cube): """ Removes any dimension of length one. If it has an associated DimCoord or diff --git a/noxfile.py b/noxfile.py index 497330de377..e4d91c6bab1 100755 --- a/noxfile.py +++ b/noxfile.py @@ -8,6 +8,8 @@ import hashlib import os from pathlib import Path +from tempfile import NamedTemporaryFile +from typing import Literal import nox from nox.logger import logger @@ -16,7 +18,7 @@ nox.options.reuse_existing_virtualenvs = True #: Python versions we can run sessions under -_PY_VERSIONS_ALL = ["3.7", "3.8"] +_PY_VERSIONS_ALL = ["3.8"] _PY_VERSION_LATEST = _PY_VERSIONS_ALL[-1] #: One specific python version for docs builds @@ -28,6 +30,13 @@ #: Default cartopy cache directory. CARTOPY_CACHE_DIR = os.environ.get("HOME") / Path(".local/share/cartopy") +# https://github.com/numpy/numpy/pull/19478 +# https://github.com/matplotlib/matplotlib/pull/22099 +#: Common session environment variables. +ENV = dict( + NPY_DISABLE_CPU_FEATURES="AVX512F,AVX512CD,AVX512VL,AVX512BW,AVX512DQ,AVX512_SKX" +) + def session_lockfile(session: nox.sessions.Session) -> Path: """Return the path of the session lockfile.""" @@ -210,6 +219,7 @@ def tests(session: nox.sessions.Session): """ prepare_venv(session) session.install("--no-deps", "--editable", ".") + session.env.update(ENV) session.run( "python", "-m", @@ -232,6 +242,7 @@ def doctest(session: nox.sessions.Session): """ prepare_venv(session) session.install("--no-deps", "--editable", ".") + session.env.update(ENV) session.cd("docs") session.run( "make", @@ -280,48 +291,185 @@ def linkcheck(session: nox.sessions.Session): ) -@nox.session(python=PY_VER[-1], venv_backend="conda") +@nox.session @nox.parametrize( - ["ci_mode"], - [True, False], - ids=["ci compare", "full"], + "run_type", + ["overnight", "branch", "custom"], + ids=["overnight", "branch", "custom"], ) -def benchmarks(session: nox.sessions.Session, ci_mode: bool): +def benchmarks( + session: nox.sessions.Session, + run_type: Literal["overnight", "branch", "custom"], +): """ - Perform esmf-regrid performance benchmarks (using Airspeed Velocity). + Perform Iris performance benchmarks (using Airspeed Velocity). 
+
+    All run types require a single Nox positional argument (e.g.
+    ``nox --session="foo" -- my_pos_arg``) - detailed in the parameters
+    section - and can optionally accept a series of further arguments that
+    will be added to the session's ASV command.

     Parameters
     ----------
     session: object
         A `nox.sessions.Session` object.
-    ci_mode: bool
-        Run a cut-down selection of benchmarks, comparing the current commit to
-        the last commit for performance regressions.
-
-    Notes
-    -----
-    ASV is set up to use ``nox --session=tests --install-only`` to prepare
-    the benchmarking environment. This session environment must use a Python
-    version that is also available for ``--session=tests``.
+    run_type: {"overnight", "branch", "custom"}
+        * ``overnight``: benchmarks all commits between the input **first
+          commit** and ``HEAD``, comparing each to its parent for performance
+          shifts. If a commit causes shifts, the output is saved to a file:
+          ``.asv/performance-shifts/<commit-sha>``. Designed for checking the
+          previous 24 hours' commits, typically in a scheduled script.
+        * ``branch``: performs the same operations as ``overnight``, but always
+          on two commits only - ``HEAD``, and ``HEAD``'s merge-base with the
+          input **base branch**. Output from this run is never saved to a file.
+          Designed for testing if the active branch's changes cause performance
+          shifts - anticipating what would be caught by ``overnight`` once
+          merged.
+          **For maximum accuracy, avoid using the machine that is running this
+          session for anything else. Run time could be >1 hour for the full
+          benchmark suite.**
+        * ``custom``: run ASV with the input **ASV sub-command**, without any
+          preset arguments - these must all be supplied by the user. So just
+          like running ASV manually, with the convenience of re-using the
+          session's scripted setup steps.
+
+    Examples
+    --------
+    * ``nox --session="benchmarks(overnight)" -- a1b23d4``
+    * ``nox --session="benchmarks(branch)" -- upstream/main``
+    * ``nox --session="benchmarks(branch)" -- upstream/mesh-data-model``
+    * ``nox --session="benchmarks(branch)" -- upstream/main --bench=regridding``
+    * ``nox --session="benchmarks(custom)" -- continuous a1b23d4 HEAD --quick``

     """
+    # The threshold beyond which shifts are 'notable'. See ``asv compare``
+    # docs for more.
+    COMPARE_FACTOR = 1.2
+
     session.install("asv", "nox")
+
+    data_gen_var = "DATA_GEN_PYTHON"
+    if data_gen_var in os.environ:
+        print("Using existing data generation environment.")
+    else:
+        print("Setting up the data generation environment...")
+        # Get Nox to build an environment for the `tests` session, but don't
+        # run the session. Will re-use a cached environment if appropriate.
+        session.run_always(
+            "nox",
+            "--session=tests",
+            "--install-only",
+            f"--python={_PY_VERSION_LATEST}",
+        )
+        # Find the environment built above, set it to be the data generation
+        # environment.
+        data_gen_python = next(
+            Path(".nox").rglob(f"tests*/bin/python{_PY_VERSION_LATEST}")
+        ).resolve()
+        session.env[data_gen_var] = data_gen_python
+
+        # Mule setup relies on data_gen_python, which is only defined when
+        # the environment has just been built above.
+        mule_dir = data_gen_python.parents[1] / "resources" / "mule"
+        if not mule_dir.is_dir():
+            print("Installing Mule into data generation environment...")
+            session.run_always(
+                "git",
+                "clone",
+                "https://github.com/metomi/mule.git",
+                str(mule_dir),
+                external=True,
+            )
+            session.run_always(
+                str(data_gen_python),
+                "-m",
+                "pip",
+                "install",
+                str(mule_dir / "mule"),
+                external=True,
+            )
+
+    print("Running ASV...")
     session.cd("benchmarks")
     # Skip over setup questions for a new machine.
session.run("asv", "machine", "--yes") - def asv_exec(*sub_args: str) -> None: - run_args = ["asv", *sub_args] - session.run(*run_args) - - if ci_mode: - # If on a PR: compare to the base (target) branch. - # Else: compare to previous commit. - previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1") - try: - asv_exec("continuous", "--factor=1.2", previous_commit, "HEAD") - finally: - asv_exec("compare", previous_commit, "HEAD") + # All run types require one Nox posarg. + run_type_arg = { + "overnight": "first commit", + "branch": "base branch", + "custom": "ASV sub-command", + } + if run_type not in run_type_arg.keys(): + message = f"Unsupported run-type: {run_type}" + raise NotImplementedError(message) + if not session.posargs: + message = ( + f"Missing mandatory first Nox session posarg: " + f"{run_type_arg[run_type]}" + ) + raise ValueError(message) + first_arg = session.posargs[0] + # Optional extra arguments to be passed down to ASV. + asv_args = session.posargs[1:] + + def asv_compare(*commits): + """Run through a list of commits comparing each one to the next.""" + commits = [commit[:8] for commit in commits] + shifts_dir = Path(".asv") / "performance-shifts" + for i in range(len(commits) - 1): + before = commits[i] + after = commits[i + 1] + asv_command_ = f"asv compare {before} {after} --factor={COMPARE_FACTOR} --split" + session.run(*asv_command_.split(" ")) + + if run_type == "overnight": + # Record performance shifts. + # Run the command again but limited to only showing performance + # shifts. + shifts = session.run( + *asv_command_.split(" "), "--only-changed", silent=True + ) + if shifts: + # Write the shifts report to a file. + # Dir is used by .github/workflows/benchmarks.yml, + # but not cached - intended to be discarded after run. + shifts_dir.mkdir(exist_ok=True, parents=True) + shifts_path = shifts_dir / after + with shifts_path.open("w") as shifts_file: + shifts_file.write(shifts) + + # Common ASV arguments used for both `overnight` and `bench` run_types. + asv_harness = "asv run {posargs} --attribute rounds=4 --interleave-rounds --strict --show-stderr" + + if run_type == "overnight": + first_commit = first_arg + commit_range = f"{first_commit}^^.." + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + # git rev-list --first-parent is the command ASV uses. + git_command = f"git rev-list --first-parent {commit_range}" + commit_string = session.run( + *git_command.split(" "), silent=True, external=True + ) + commit_list = commit_string.rstrip().split("\n") + asv_compare(*reversed(commit_list)) + + elif run_type == "branch": + base_branch = first_arg + git_command = f"git merge-base HEAD {base_branch}" + merge_base = session.run( + *git_command.split(" "), silent=True, external=True + )[:8] + + with NamedTemporaryFile("w") as hashfile: + hashfile.writelines([merge_base, "\n", "HEAD"]) + hashfile.flush() + commit_range = f"HASHFILE:{hashfile.name}" + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + asv_compare(merge_base, "HEAD") + else: - # f5ceb808 = first commit supporting nox --install-only . 
- asv_exec("run", "f5ceb808..HEAD") + asv_subcommand = first_arg + assert run_type == "custom" + session.run("asv", asv_subcommand, *asv_args) diff --git a/pyproject.toml b/pyproject.toml index 8d01db2af79..26e6ae727a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 79 -target-version = ['py37', 'py38'] +target-version = ['py38'] include = '\.pyi?$' extend-exclude = ''' ( diff --git a/requirements/ci/nox.lock/py37-linux-64.lock b/requirements/ci/nox.lock/py37-linux-64.lock deleted file mode 100644 index 06bcca68f8b..00000000000 --- a/requirements/ci/nox.lock/py37-linux-64.lock +++ /dev/null @@ -1,228 +0,0 @@ -# Generated by conda-lock. -# platform: linux-64 -# input_hash: 2ded1a5e8a7c81e393e358171ff923c72d099e77a007d70daa2a15beb3a59545 -@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.10.8-ha878542_0.tar.bz2#575611b8a84f45960e87722eeb51fa26 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_11.tar.bz2#2dcb18a9a0fa31f4f29e5a9b3eade394 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_11.tar.bz2#0bf83958e788f1e75ba26154cb702afe -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.27-ha770c72_3.tar.bz2#49210aaa9080888f9f9b460c70202bd3 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_11.tar.bz2#4ea2f9f83b617a7682e8aa05dcb37c6a -https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_11.tar.bz2#1d16527c76842bf9c41e9399d39d8097 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_11.tar.bz2#e3495f4f93cfd6b68021cbe2b5844cd5 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.2-h9c3ff4c_0.tar.bz2#0fb039650fa638f258fdc9e9ef125f52 -https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 
-https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.1-h9c3ff4c_1.tar.bz2#17a5f413039ce1e105fab5df9c668eb5 -https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d -https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-68.2-h9c3ff4c_0.tar.bz2#6618c9b191638993f2a818c6529e1b49 -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 -https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h7f98852_6.tar.bz2#b0f44f63f7d771d7670747a1dd5d5ac1 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz2#91d22aefa665265e8e31988b15145c8a -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 -https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.18-pthreads_h8fe5266_0.tar.bz2#41532e4448c0cce086d6570f95e4e12e -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.1-h7f98852_0.tar.bz2#90607c4c0247f04ec98b48997de71c1a -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 -https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e -https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1l-h7f98852_0.tar.bz2#de7b38a1542dbe6f41653a8ae71adc53 -https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 -https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 -https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a 
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 -https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 -https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 -https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-12_linux64_openblas.tar.bz2#4f93ba28c628a2c27cf39c055e6b219c -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h7f98852_6.tar.bz2#28bfe0a70154e6881da7bae97517c948 -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.1-ha95c52a_0.tar.bz2#4eec219a4bd69c11579601804cec5baf -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h7f98852_6.tar.bz2#9e94bf16f14c78a36561d5019f490d22 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-12_linux64_openblas.tar.bz2#2e5082d4a9a18c21100e6ce5b6bcb4ec -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-12_linux64_openblas.tar.bz2#9f401a6807a97e0c859d7522ae3d51ec -https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_1.tar.bz2#d0a7846b7b3b8fb0d8b36904a53b8155 
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_1.tar.bz2#175a746a43d42c053b91aa765fbc197d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.27-hfa10184_3.tar.bz2#7cd299934880b05703ee86a62325982f -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.37.0-h9cd32fc_0.tar.bz2#eb66fc098824d25518a79e83d12a81d6 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.11-h27826a3_1.tar.bz2#84e76fb280e735fec1efd2d21fd9cb27 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h7f98852_6.tar.bz2#612385c4a83edb0619fe911d9da317f4 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_2.tar.bz2#6221115a24700aa8598ae5aa1574902d -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f -https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.1-h3452ae3_0.tar.bz2#6d4bf6265d998b6c975c26a6a24062a2 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/nss-3.73-hb5efdd6_0.tar.bz2#a5b91a14292ac34bac1f0506a3772fd5 -https://conda.anaconda.org/conda-forge/linux-64/python-3.7.12-hb7a2778_100_cpython.tar.bz2#2d94b3e6a9fdaf83f6955d008c8011a7 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.9-pyhd8ed1ab_0.tar.bz2#a57a3f6f2b0a7400e340f850c405df19 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 
-https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_0.tar.bz2#d05900c9b0ef4c3d1cef2e8a5c49350e -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb -https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.11.1-pyhd8ed1ab_0.tar.bz2#a510ec93fdb50775091d2afba98a8acb -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_2.tar.bz2#3cf866063f2803944ddaee8b1d6da531 -https://conda.anaconda.org/conda-forge/noarch/idna-3.1-pyhd3deb0d_0.tar.bz2#9c9aea4b8391264477df484f798562d0 -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.80.0-h2574ce0_0.tar.bz2#5d0784b790350f7939bb5d3f2c32e700 -https://conda.anaconda.org/conda-forge/linux-64/libpq-13.5-hd57d9b9_1.tar.bz2#a0f425d61c7df890d6381ea352c3f1d7 -https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d -https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.6-pyhd8ed1ab_0.tar.bz2#3087df8c636c5a00e694605c39ce4982 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.7-2_cp37m.tar.bz2#afff88bf9a7048da740c70aeb8cdbb82 -https://conda.anaconda.org/conda-forge/noarch/pytz-2021.3-pyhd8ed1ab_0.tar.bz2#7e4f811bff46a5a6a7e0094921389395 -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 -https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e 
-https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.0.1-pyha770c72_0.tar.bz2#1fc03816925d3cb7fdab9ab234e7fea7 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.6.0-pyhd8ed1ab_0.tar.bz2#855e2c4622f5eb50a4f6f7167b9ba17a -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py37h89c1867_1003.tar.bz2#490366305378c8690b65c4bce9b9f6a4 -https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f -https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py37h89c1867_1.tar.bz2#48e8442b6097c7d4a0e3494c74ff9eeb -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py37h036bc23_0.tar.bz2#05ab26c7685bcb7dd8bc8752c121f823 -https://conda.anaconda.org/conda-forge/linux-64/curl-7.80.0-h2574ce0_0.tar.bz2#4d8fd67e5ab7e00fde8ad085464f43b7 -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.26-py37hcd2ae1e_0.tar.bz2#ab81ddd8474c4cee87fe2f9ef163f44f -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py37h89c1867_1.tar.bz2#e0a3be74a594032b73f22762ba9941cc -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 -https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.10.0-py37h89c1867_0.tar.bz2#5187ab9fedd67074b301ba81ae01fd45 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py37h2527ec5_1.tar.bz2#441ac4d93d0d57d21ea9dcac48cb5d0d -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6ad9fb6_0.tar.bz2#45142dc44fcd04934f9ad68ce205e54d -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py37h5e8e339_1.tar.bz2#6c7c14c95d4c435b66261639b64c7c51 -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py37h1e5cb63_0.tar.bz2#3d5ca9f081a7756df4f027776ff23b73 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.5-py37hf2998dd_0.tar.bz2#ae1049dd3d8d15fc02af2f417cff5494 -https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 -https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 -https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.2-py37h718be6c_0.tar.bz2#ecac4e308b87ff93d44ea5e56ab39084 -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.0-h277dcde_0.tar.bz2#7ba8c7a9bf1c2fedf4a6d6dc92839baf -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py37hcd2ae1e_8.tar.bz2#ae12b17bbd5733cb8884b42dcc5c59f0 -https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py37h89c1867_4.tar.bz2#44df88d27e2891f90e3f06dcfcca0927 -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py37h5e8e339_1.tar.bz2#c89489cddb9e53155e241e9aacd35e4b -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py37h5e8e339_3.tar.bz2#7f167ecf4d4771ee33589e09479238e7 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 
-https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.2.0-py37h89c1867_0.tar.bz2#2ad2bbd333df969fb4ecadbabec85603 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py37h5e8e339_2.tar.bz2#ec86ae00c96dea5f2d810957a8fabc26 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py37h5e8e339_0.tar.bz2#9f4ac5fb219d7c63c3c3cd9c630b81a6 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py37h5e8e339_1003.tar.bz2#4ad2e74470a3c08b0f6d59699f0d9a32 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.1.1-py37hb1e94ed_1.tar.bz2#1b5b81088bc7d7e0bef7de4ef4bd1221 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py37hf1a17b8_0.tar.bz2#7ad2c98aaab85d80017b3a6f79a2aa5d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.12.0-pyhd8ed1ab_0.tar.bz2#e572bf40b1e8783fed2526ecb5f5209e -https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py37h2527ec5_2.tar.bz2#9aba6bcb02d12dbd2fead23b85720712 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.28.5-py37h5e8e339_0.tar.bz2#3761f28aaafe435080d26b00fbcd7af8 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.1.1-h83ec7ef_0.tar.bz2#ca8faaee04a83e3c4d6f708a35ac2ec3 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-4.10.0-hd8ed1ab_0.tar.bz2#1de4b4503b2803c1b0fcba6bb91ab274 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py37hb1e94ed_1006.tar.bz2#e06cf91c2624284413641be2cb8c3198 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.3.5-py37he8f5f7f_0.tar.bz2#6ebf1968b199a141a5cce6adaedb3651 -https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.1-pyhd8ed1ab_0.tar.bz2#6f857f10fe2960dce20d59d71a290d51 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.2.1-py37hb589d83_5.tar.bz2#ea78cbba7d43ad17ec043a9ebdee3bf5 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py37hac37412_8.tar.bz2#148f2e971a67831ed0691f63cd826468 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.1.1-py37h6f94858_1004.tar.bz2#42b37830a63405589fef3d13db505e7d -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py37hb1e94ed_1.tar.bz2#3a94b25c520754b56cdfa7d865806524 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py37hf2a6cf1_0.tar.bz2#129c613e1d0f09d9fd0473a0da6161a9 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py37h9b0f7a3_4.tar.bz2#568474687cd6be5f834cb682637ac0de -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py37h89c1867_1.tar.bz2#cbe5a8c8ae88d1e73b4297a73d08408a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py37hb1e94ed_2.tar.bz2#ba9daa43279450692efc63037867ed93 -https://conda.anaconda.org/conda-forge/noarch/identify-2.3.7-pyhd8ed1ab_0.tar.bz2#ae1a5e834fbca62ee88ab55fb276be63 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py37h1058ff1_0.tar.bz2#b431c18c1cf130f03d83498f2ef7047b -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h1364a43_6.tar.bz2#9caa0cf923af3d037897c6d7f8ea57c0 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py37hf784469_101.tar.bz2#5b05dc55e51be0696878e9a575c12f77 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.10-h54213e6_2.tar.bz2#b7ed7c76c9360db1f91afba2e220007b -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce -https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py37he336c9b_8.tar.bz2#2fe25d82cb4e59191df561c40870ca6b -https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py37he336c9b_8.tar.bz2#0a67d477c0524897883ca0f86d6fb15c -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.1-py37h9a08e6e_5.tar.bz2#e44dc116f747b0a7bceaf1533acc6b48 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-hc3c00ef_0.tar.bz2#43694e152ee85559ddf64b1acb8801dd -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.16.0-py37h89c1867_0.tar.bz2#43b270fe44130353e540037ad27da097 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py37h89c1867_8.tar.bz2#8038f9765a907fcf6fdfa6a9db71e371 -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.7-pyhd8ed1ab_0.tar.bz2#be75bab4820a56f77ba1a3fc9139c36a -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py37h7352969_101.tar.bz2#64fd02e7a0cefe0b5c604fea03774c73 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h85b4f2f_1.tar.bz2#bc6418fd87ea67cf14417337ced3daa2 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py37h89c1867_0.tar.bz2#b14435faa62d35cea49a9183d595f145 -https://conda.anaconda.org/conda-forge/noarch/requests-2.26.0-pyhd8ed1ab_1.tar.bz2#358581cc782802270d77c454c73a811a -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.3.2-pyh6c4a22f_0.tar.bz2#e8ffaea0961c0d7a6767f2394042043d -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-1.0.0-pyhd8ed1ab_0.tar.bz2#9f633f2f2869184e31acfeae95b24345 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_1.tar.bz2#63d2f874f990fdcab47c822b608d6ade diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 97fdfc68b26..caf6a739b30 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -9,33 +9,34 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb 
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_11.tar.bz2#2dcb18a9a0fa31f4f29e5a9b3eade394 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_11.tar.bz2#0bf83958e788f1e75ba26154cb702afe +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_12.tar.bz2#f547bf125ab234cec9c89491b262fc2f +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_12.tar.bz2#7ff3b832ba5e6918c0d026976359d065 https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.27-ha770c72_3.tar.bz2#49210aaa9080888f9f9b460c70202bd3 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.28-ha770c72_0.tar.bz2#56594fdd5a80774a80d546fbbccf2c03 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_11.tar.bz2#4ea2f9f83b617a7682e8aa05dcb37c6a -https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_11.tar.bz2#1d16527c76842bf9c41e9399d39d8097 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_12.tar.bz2#33c165be455015cc74e8d857182f3f58 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_12.tar.bz2#763c5ec8116d984b4a33342236d7da36 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_11.tar.bz2#e3495f4f93cfd6b68021cbe2b5844cd5 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_12.tar.bz2#d34efbb8d7d6312c816b4bb647b818b1 https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.2-h9c3ff4c_0.tar.bz2#0fb039650fa638f258fdc9e9ef125f52 +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.4-h9c3ff4c_0.tar.bz2#3cedab1fd76644efd516e1b271f2da95 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.1-h9c3ff4c_1.tar.bz2#17a5f413039ce1e105fab5df9c668eb5 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.2-h9c3ff4c_0.tar.bz2#fe9a66a351bfa7a84c3108304c7bcba5 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-68.2-h9c3ff4c_0.tar.bz2#6618c9b191638993f2a818c6529e1b49 +https://conda.anaconda.org/conda-forge/linux-64/icu-69.1-h9c3ff4c_0.tar.bz2#e0773c9556d588b062a4e1424a6a02fa 
https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h7f98852_0.tar.bz2#5c214edc675a7fb7cbb34b1d854e5141 https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h7f98852_6.tar.bz2#b0f44f63f7d771d7670747a1dd5d5ac1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz2#91d22aefa665265e8e31988b15145c8a https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 +https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.0-hf817b99_0.tar.bz2#b10bb2ebebfffa8800fa80ad3285719e https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 @@ -43,11 +44,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.18-pthreads_h8fe https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.1-h7f98852_0.tar.bz2#90607c4c0247f04ec98b48997de71c1a +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.2-h7f98852_1.tar.bz2#46cf26ecc8775a0aab300ea1821aaa3c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h9c3ff4c_0.tar.bz2#fb31bcb7af058244479ca635d20f0f4a https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1l-h7f98852_0.tar.bz2#de7b38a1542dbe6f41653a8ae71adc53 https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa @@ -62,33 +63,33 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_10 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 
-https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-12_linux64_openblas.tar.bz2#4f93ba28c628a2c27cf39c055e6b219c +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-13_linux64_openblas.tar.bz2#8a4038563ed92dfa622bd72c0d8f31d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h7f98852_6.tar.bz2#28bfe0a70154e6881da7bae97517c948 +https://conda.anaconda.org/conda-forge/linux-64/libclang-13.0.0-default_hc23dcda_0.tar.bz2#7b140452b5bc91e46410b84807307249 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.1-ha95c52a_0.tar.bz2#4eec219a4bd69c11579601804cec5baf +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-ha95c52a_0.tar.bz2#5222b231b1ef49a7f60d40b363469b70 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h7f98852_6.tar.bz2#9e94bf16f14c78a36561d5019f490d22 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-12_linux64_openblas.tar.bz2#2e5082d4a9a18c21100e6ce5b6bcb4ec +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-13_linux64_openblas.tar.bz2#b17676dbd6688396c3a3076259fb7907 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-12_linux64_openblas.tar.bz2#9f401a6807a97e0c859d7522ae3d51ec -https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_1.tar.bz2#d0a7846b7b3b8fb0d8b36904a53b8155 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-13_linux64_openblas.tar.bz2#018b80e8f21d8560ae4961567e3e00c9 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.46.0-h812cca2_0.tar.bz2#507fa47e9075f889af8e8b72925379be https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 
https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h885dcf4_1.tar.bz2#d1355eaa48f465782f228275a0a69771 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_1.tar.bz2#175a746a43d42c053b91aa765fbc197d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.27-hfa10184_3.tar.bz2#7cd299934880b05703ee86a62325982f +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.28-hfa10184_0.tar.bz2#aac17542e50a474e2e632878dc696d50 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.37.0-h9cd32fc_0.tar.bz2#eb66fc098824d25518a79e83d12a81d6 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.11-h27826a3_1.tar.bz2#84e76fb280e735fec1efd2d21fd9cb27 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 @@ -97,39 +98,38 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h7f98852_6.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_2.tar.bz2#6221115a24700aa8598ae5aa1574902d +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_3.tar.bz2#511aa83cdfcc0132380db5daf2f15f27 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 -https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.1-h3452ae3_0.tar.bz2#6d4bf6265d998b6c975c26a6a24062a2 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.2-h3452ae3_0.tar.bz2#c363665b4aabe56aae4f8981cff5b153 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/nss-3.73-hb5efdd6_0.tar.bz2#a5b91a14292ac34bac1f0506a3772fd5 -https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-hb7a2778_2_cpython.tar.bz2#148ea076514259c7f562fbfba956a693 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.74-hb5efdd6_0.tar.bz2#136876ca50177058594f6c2944e95c40 +https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-ha38a3c6_3_cpython.tar.bz2#bed445cebcd8f97dce76dc06201928ee https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b 
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.9-pyhd8ed1ab_0.tar.bz2#a57a3f6f2b0a7400e340f850c405df19 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.11-pyhd8ed1ab_0.tar.bz2#e51530e33440ea8044edb0076cb40a0f https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_0.tar.bz2#d05900c9b0ef4c3d1cef2e8a5c49350e -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb -https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.11.1-pyhd8ed1ab_0.tar.bz2#a510ec93fdb50775091d2afba98a8acb -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_2.tar.bz2#3cf866063f2803944ddaee8b1d6da531 -https://conda.anaconda.org/conda-forge/noarch/idna-3.1-pyhd3deb0d_0.tar.bz2#9c9aea4b8391264477df484f798562d0 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_1.tar.bz2#d3f5797d3f9625c64860c93fc4359e64 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.94-ha180cfb_0.tar.bz2#c534c5248da4913002473919d76d0161 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.1.0-pyhd8ed1ab_0.tar.bz2#188e095f4dc38887bb48b065734b9e8d +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_3.tar.bz2#524a9f1718bac53a6cf4906bcc51d044 +https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.80.0-h2574ce0_0.tar.bz2#5d0784b790350f7939bb5d3f2c32e700 -https://conda.anaconda.org/conda-forge/linux-64/libpq-13.5-hd57d9b9_1.tar.bz2#a0f425d61c7df890d6381ea352c3f1d7 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.81.0-h2574ce0_0.tar.bz2#1f8655741d0269ca6756f131522da1e8 +https://conda.anaconda.org/conda-forge/linux-64/libpq-14.1-hd57d9b9_1.tar.bz2#a7024916bfdf33a014a0cc803580c9a1 https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.3.0-pyhd8ed1ab_0.tar.bz2#7bc119135be2a43e1701432399d8c28a https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.6-pyhd8ed1ab_0.tar.bz2#3087df8c636c5a00e694605c39ce4982 
+https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.7-pyhd8ed1ab_0.tar.bz2#727e2216d9c47455d8ddc060eb2caad9 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 https://conda.anaconda.org/conda-forge/noarch/pytz-2021.3-pyhd8ed1ab_0.tar.bz2#7e4f811bff46a5a6a7e0094921389395 @@ -143,78 +143,80 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.ta https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.7.0-pyhd8ed1ab_1.tar.bz2#b689b2cbc8481b224777415e1a193170 https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha00ac49_1009.tar.bz2#d1dff57b8731c245d3247b46d002e1c9 https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py38h578d9bd_1.tar.bz2#52a6cee65a5d10ed1c3f0af24fb48dd3 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py38h3931269_0.tar.bz2#9c491a90ae11d08ca97326a0ed876f3a -https://conda.anaconda.org/conda-forge/linux-64/curl-7.80.0-h2574ce0_0.tar.bz2#4d8fd67e5ab7e00fde8ad085464f43b7 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.81.0-h2574ce0_0.tar.bz2#3a95d393b490f82aa406f1892fad84d9 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py38h578d9bd_3.tar.bz2#a7866449fb9e5e4008a02df276549d34 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.10.1-py38h578d9bd_0.tar.bz2#26da12e39b1b93e82fb865e967d0cbe0 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1.tar.bz2#085365abfe53d5d13bb68b1dda0b439e -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6ad9fb6_0.tar.bz2#45142dc44fcd04934f9ad68ce205e54d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h3cfcdeb_1.tar.bz2#37d7568c595f0cfcd0c493f5ca0344ab https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.5-py38h87f13fb_0.tar.bz2#07fef7a6a3c56e0410d047c0aa62416e +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.2-py38h6ae9a64_0.tar.bz2#065a900932f904e0182acfcfadc467e3 https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 
https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.0-h277dcde_0.tar.bz2#7ba8c7a9bf1c2fedf4a6d6dc92839baf +https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.1-h277dcde_0.tar.bz2#f2ceb1be6565c35e2db0ac948754751d https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py38h709712a_8.tar.bz2#11b72f5b1cc15427c89232321172a0bc https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_4.tar.bz2#9c4bbee6f682f2fc7d7803df3996e77e https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.2.0-py38h578d9bd_0.tar.bz2#cbaabcbc6fb460f1a515188e6d966fa2 +https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-ha98a1a1_5.tar.bz2#9b27fa0b1044a2119fb1b290617fe06f +https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.7.1-py38h578d9bd_0.tar.bz2#8bf9c51a7e371df1673de909c1f46e6c https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py38h578d9bd_1.tar.bz2#37717ce393db8536ae2b613839af4274 +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.13.0-py38h578d9bd_0.tar.bz2#561081f4a30990533541979c9ee84732 https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1003.tar.bz2#9189b42c42b9c87b2b2068cbe31901a8 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.1.1-py38h6c62de6_1.tar.bz2#d4a47fd2bbc8292a322d462734b0ada5 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.2-py38h6c62de6_0.tar.bz2#73892e60ccea826c7f7a2215e48d22cf https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py38h3e25421_0.tar.bz2#acc14d0d71dbf74f6a15f2456951b6cf -https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.12.0-pyhd8ed1ab_0.tar.bz2#e572bf40b1e8783fed2526ecb5f5209e -https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py38h1fd1430_2.tar.bz2#482431310c7b3320a31c8c6ce82a7a15 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.28.5-py38h497a2fe_0.tar.bz2#f611d0be8205d5b0566f9c97e7d66ae3 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.9.1-h83ec7ef_1.tar.bz2#9a9e823b2e31e84e5ce06f54ffce9d70 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.1-pyhd8ed1ab_0.tar.bz2#7968db84df10b74d9792d66d7da216df +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.29.1-py38h497a2fe_0.tar.bz2#121e02be214af4980911bb2cbd5b2742 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.3.1-hb4a5f5f_0.tar.bz2#abe529a4b140720078f0febe1b6014a4 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c 
https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006.tar.bz2#829b1209dfadd431a11048d6eeaf5bef https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.3.5-py38h43a58ef_0.tar.bz2#171cc96da3b1a0ebd4bf2b5586b7cda3 -https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.1-pyhd8ed1ab_0.tar.bz2#6f857f10fe2960dce20d59d71a290d51 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38hdd21e9b_0.tar.bz2#ceb8ec641cd5faa40b568f8ca008b6dc +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.0-py38h43a58ef_0.tar.bz2#23427f52c81076594a95c006ebf7552e +https://conda.anaconda.org/conda-forge/noarch/pip-22.0.3-pyhd8ed1ab_0.tar.bz2#45dedae69a0ea21cb8566d04b2ca5536 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38h5383654_1.tar.bz2#5b600e019fa7c33be73bdb626236936b https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h6c62de6_1.tar.bz2#a350e3f4ca899e95122f66806e048858 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py38h6c62de6_1.tar.bz2#2953d3fc0113fc6ffb955a5b72811fb0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py38h56a6a73_0.tar.bz2#2d318049369bb52d2687b0ac2be82751 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h800f7b8_4.tar.bz2#5fb9a3af4ebd8b21ca099e107306be72 +https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h596eeab_5.tar.bz2#ec3b783081e14a9dc0eb5ce609649728 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h1fd1430_1.tar.bz2#c494f75082f9c052944fda1b22c83336 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py38h6c62de6_2.tar.bz2#350322b046c129e5802b79358a1343f7 -https://conda.anaconda.org/conda-forge/noarch/identify-2.3.7-pyhd8ed1ab_0.tar.bz2#ae1a5e834fbca62ee88ab55fb276be63 +https://conda.anaconda.org/conda-forge/noarch/identify-2.4.8-pyhd8ed1ab_0.tar.bz2#d4d25c0b7c1a7a1b0442e061fdd49260 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2#47cf0cab2ae368e1062e75cfbc4277af -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h1364a43_6.tar.bz2#9caa0cf923af3d037897c6d7f8ea57c0 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.4-mpi_mpich_h1364a43_0.tar.bz2#b6ba4f487ef9fd5d353ff277df06d133 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py38h2823cc8_101.tar.bz2#1dfe1cdee4532c72f893955259eb3de9 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.10-hb8ff022_1.tar.bz2#f67c24bfd760cd50c285556ee7507853 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.3-h9967ed3_0.tar.bz2#37f1c68380bc5dfe0f5bb2655e207a73 
+https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_0.tar.bz2#1d7e241dfaf5475e893d4b824bb71b44 https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579 https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.1-py38h2f98cf7_5.tar.bz2#8f989133575134016a0def90ae965e85 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.2-py38ha217159_3.tar.bz2#d7461e191f7a0522e4709612786bdf4e https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-hc3c00ef_0.tar.bz2#43694e152ee85559ddf64b1acb8801dd +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-h0a9e6e8_2.tar.bz2#aa768fdaad03509a97df37f81163346b https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.16.0-py38h578d9bd_0.tar.bz2#61e1e83f0eccef5e449db03c340ab6c2 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.17.0-py38h578d9bd_0.tar.bz2#839ac9dba9a6126c9532781a9ea4506b https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_8.tar.bz2#88368a5889f31dff922a2d57bbfc3f5b -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.7-pyhd8ed1ab_0.tar.bz2#be75bab4820a56f77ba1a3fc9139c36a +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.8-pyhd8ed1ab_1.tar.bz2#53f1387c68c21cecb386e2cde51b3f7c https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py38h9147699_101.tar.bz2#5a9de1dec507b6614150a77d1aabf257 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h85b4f2f_1.tar.bz2#bc6418fd87ea67cf14417337ced3daa2 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h8e749b2_2.tar.bz2#8c20fd968c8b6af73444b1199d5fb0cb https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py38h578d9bd_0.tar.bz2#0d78be9cf1c400ba8e3077cf060492f1 -https://conda.anaconda.org/conda-forge/noarch/requests-2.26.0-pyhd8ed1ab_1.tar.bz2#358581cc782802270d77c454c73a811a -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.3.2-pyh6c4a22f_0.tar.bz2#e8ffaea0961c0d7a6767f2394042043d +https://conda.anaconda.org/conda-forge/noarch/requests-2.27.1-pyhd8ed1ab_0.tar.bz2#7c1c427246b057b8fa97200ecdb2ed62 +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.4.0-pyh6c4a22f_1.tar.bz2#a9025d14c2a609e0d895ad3e75b5369c https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml deleted file mode 100644 index 094ae847027..00000000000 --- a/requirements/ci/py37.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: iris-dev - -channels: - - conda-forge - -dependencies: - - python =3.7 - -# Setup dependencies. - - setuptools >=40.8.0 - -# Core dependencies. 
- - cartopy >=0.20 - - cf-units >=3 - - cftime >=1.5 - - dask-core >=2 - - matplotlib - - netcdf4 - - numpy >=1.19 - - python-xxhash - - scipy - -# Optional dependencies. - - esmpy >=7.0 - - graphviz - - iris-sample-data >=2.4.0 - - mo_pack - - nc-time-axis >=1.3 - - pandas - - pip - - python-stratify - -# Test dependencies. - - filelock - - imagehash >=4.0 - - nose - - pillow <7 - - pre-commit - - requests - -# Documentation dependencies. - - sphinx - - sphinxcontrib-napoleon - - sphinx-copybutton - - sphinx-gallery - - sphinx-panels - - sphinx_rtd_theme diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index d3d7f9d0c2c..ef095815c9b 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -25,7 +25,7 @@ dependencies: - graphviz - iris-sample-data >=2.4.0 - mo_pack - - nc-time-axis >=1.3 + - nc-time-axis >=1.4 - pandas - pip - python-stratify diff --git a/setup.cfg b/setup.cfg index 501614e9d04..ecdcad85b2c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ author = SciTools Developers author_email = scitools-iris-dev@googlegroups.com classifiers = - Development Status :: 5 Production/Stable + Development Status :: 5 - Production/Stable Intended Audience :: Science/Research License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+) Operating System :: MacOS @@ -11,7 +11,6 @@ classifiers = Operating System :: Unix Programming Language :: Python Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: Implementation :: CPython Topic :: Scientific/Engineering @@ -59,7 +58,7 @@ packages = find: package_dir = =lib python_requires = - >=3.7 + >=3.8 zip_safe = False [options.packages.find] @@ -82,7 +81,7 @@ test = requests all = mo_pack - nc-time-axis>=1.3 + nc-time-axis>=1.4 pandas stratify %(docs)s diff --git a/tools/gen_helpers.py b/tools/gen_helpers.py deleted file mode 100644 index 825c78139e5..00000000000 --- a/tools/gen_helpers.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -from datetime import datetime -import os -import os.path - -HEADER = \ - '''# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -# -# DO NOT EDIT: AUTO-GENERATED''' - - -def absolute_path(path): - return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) - - -def prep_module_file(module_path): - """ - prepare a module file, creating directory if needed and writing the - header into that file - - """ - module_path = absolute_path(module_path) - module_dir = os.path.dirname(module_path) - if not os.path.isdir(module_dir): - os.makedirs(module_dir) - with open(module_path, 'w') as module_file: - module_file.write(HEADER.format(datetime.utcnow().year)) diff --git a/tools/gen_stash_refs.py b/tools/gen_stash_refs.py deleted file mode 100644 index e614b52ab2f..00000000000 --- a/tools/gen_stash_refs.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-
-import json
-import urllib
-import urllib2
-
-from iris.fileformats.pp import STASH
-
-import gen_helpers
-
-
-HEADER = '''
-"""
-Auto-generated from iris/tools/gen_stash_refs.py
-Relates grid code and field code to the stash code.
-
-"""
-'''
-
-CODE_PREAMBLE = ("from collections import namedtuple\n\n\n"
-                 "Stash = namedtuple('Stash', "
-                 "'grid_code field_code pseudo_level_type')\n\n\n")
-
-
-def _value_from_xref(xref, name):
-    """Return the value for the key name from xref.
-
-    Will return 0 if the key does not look like an integer.
-    """
-
-    result = xref.get(name)
-    try:
-        int(result)
-    except (ValueError, TypeError):
-        result = 0
-    return result
-
-
-def write_cross_reference_module(module_path, xrefs):
-    gen_helpers.prep_module_file(module_path)
-    with open(module_path, 'a') as module_file:
-        module_file.write(HEADER)
-        module_file.write(CODE_PREAMBLE)
-        module_file.write('STASH_TRANS = {\n')
-        for xref in xrefs:
-            stash = xref.get('stash')
-            try:
-                STASH.from_msi(stash.replace('"', ''))
-            except ValueError:
-                msg = ('stash code is not of a recognised '
-                       '"m??s??i???" form: {}'.format(stash))
-                print(msg)
-            grid = xref.get('grid')
-            if grid is not None:
-                try:
-                    int(grid)
-                except ValueError:
-                    msg = ('grid code retrieved from STASH lookup '
-                           'is not an integer: {}'.format(grid))
-                    print(msg)
-            else:
-                grid = 0
-
-            lbfc = _value_from_xref(xref, 'lbfcn')
-            pseudT = _value_from_xref(xref, 'pseudT')
-
-            module_file.write(
-                '    "{}": Stash({}, {}, {}),\n'.format(stash,
-                                                        grid,
-                                                        lbfc,
-                                                        pseudT))
-        module_file.write('}\n')
-
-
-def stash_grid_retrieve():
-    """Return a dictionary of stash codes and related information from
-    the Met Office Reference Registry.
-    """
-    baseurl = 'http://reference.metoffice.gov.uk/system/query?query='
-    query = '''prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-prefix skos: <http://www.w3.org/2004/02/skos/core#>
-
-SELECT ?stash ?grid ?lbfcn ?pseudT
-WHERE {
-  ?stashcode rdf:type ;
-             skos:notation ?stash ;
-             ?gridcode .
-OPTIONAL { ?gridcode skos:notation ?grid .}
-OPTIONAL {?stashcode  ?lbfc .
-          ?lbfc skos:notation ?lbfcn .}
-OPTIONAL {?stashcode ?pseudT_id .
-          ?pseudT_id skos:notation ?pseudT . }
-}
-order by ?stash'''
-
-    encquery = urllib.quote_plus(query)
-    out_format = '&output=json'
-    url = baseurl + encquery + out_format
-
-    response = urllib2.urlopen(url)
-    stash = json.loads(response.read())
-
-    ## heads will be of the form [u'stash', u'grid', u'lbfcn', u'pseudT']
-    ## as defined in the query string
-    heads = stash['head']['vars']
-
-    stashcodes = []
-
-    for result in stash['results']['bindings']:
-        res = {}
-        for head in heads:
-            if head in result:
-                res[head] = result[head]['value']
-        stashcodes.append(res)
-    return stashcodes
-
-
-if __name__ == '__main__':
-    xrefs = stash_grid_retrieve()
-    outfile = '../lib/iris/fileformats/_ff_cross_references.py'
-    write_cross_reference_module(outfile, xrefs)
diff --git a/tools/gen_translations.py b/tools/gen_translations.py
deleted file mode 100644
index 5ac0dc02bac..00000000000
--- a/tools/gen_translations.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the LGPL license.
-# See COPYING and COPYING.LESSER in the root of the repository for full
-# licensing details.
-"""
-Processing of metarelate metOcean content to provide Iris encodings of
-metOcean mapping translations.
- -""" - -from datetime import datetime -import os.path -import requests -import sys - -import metarelate -from metarelate.fuseki import FusekiServer - -from translator import (FORMAT_URIS, FieldcodeCFMappings, StashCFNameMappings, - StashCFHeightConstraintMappings, - CFFieldcodeMappings, - GRIB1LocalParamCFConstrainedMappings, - GRIB1LocalParamCFMappings, GRIB2ParamCFMappings, - CFConstrainedGRIB1LocalParamMappings, - CFGRIB2ParamMappings, CFGRIB1LocalParamMappings) - -HEADER = """# Copyright {name} contributors -# -# This file is part of {name} and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -# -# DO NOT EDIT: AUTO-GENERATED -# Created on {datestamp} from -# http://www.metarelate.net/metOcean -# at commit {git_sha} -# https://github.com/metarelate/metOcean/commit/{git_sha} -{doc_string} - - -from collections import namedtuple - - -CFName = namedtuple('CFName', 'standard_name long_name units') -""" - -HEADER_GRIB = """ -DimensionCoordinate = namedtuple('DimensionCoordinate', - 'standard_name units points') - -G1LocalParam = namedtuple('G1LocalParam', 'edition t2version centre iParam') -G2Param = namedtuple('G2Param', 'edition discipline category number') -""" - -DOC_STRING_GRIB = r'''""" -Provides GRIB/CF phenomenon translations. - -"""''' - -DOC_STRING_UM = r'''""" -Provides UM/CF phenomenon translations. - -"""''' - -YEAR = datetime.utcnow().year - -def _retrieve_mappings(fuseki, source, target): - """ - Interrogate the metarelate triple store for all - phenomenon translation mappings from the source - scheme to the target scheme. - - Args: - * fuseki: - The :class:`metrelate.fuseki.FusekiServer` instance. - * source: - The source metarelate metadata type for the mapping. - * target: - The target metarelate metadata type for the mapping. - - Return: - The sequence of :class:`metarelate.Mapping` - instances. - - """ - suri = 'http://www.metarelate.net/sparql/metOcean' - msg = 'Retrieving {!r} to {!r} mappings ...' - print(msg.format(source, target)) - return fuseki.retrieve_mappings(source, target, service=suri) - - -def build_um_cf_map(fuseki, now, git_sha, base_dir): - """ - Encode the UM/CF phenomenon translation mappings - within the specified file. - - Args: - * fuseki: - The :class:`metarelate.fuseki.FusekiServer` instance. - * now: - Time stamp to write into the file - * git_sha: - The git SHA1 of the metarelate commit - * base_dir: - The root directory of the Iris source. - - """ - filename = os.path.join(base_dir, 'lib', 'iris', 'fileformats', - 'um_cf_map.py') - - # Create the base directory. - if not os.path.exists(os.path.dirname(filename)): - os.makedirs(os.path.dirname(filename)) - - # Create the file to contain UM/CF translations. - with open(filename, 'w') as fh: - fh.write(HEADER.format(year=YEAR, doc_string=DOC_STRING_UM, - datestamp=now, git_sha=git_sha, name='Iris')) - fh.write('\n') - - # Encode the relevant UM to CF translations. - maps = _retrieve_mappings(fuseki, FORMAT_URIS['umf'], - FORMAT_URIS['cff']) - # create the collections, then call lines on each one - # for thread safety during lines and encode - fccf = FieldcodeCFMappings(maps) - stcf = StashCFNameMappings(maps) - stcfhcon = StashCFHeightConstraintMappings(maps) - fh.writelines(fccf.lines(fuseki)) - fh.writelines(stcf.lines(fuseki)) - fh.writelines(stcfhcon.lines(fuseki)) - - # Encode the relevant CF to UM translations. 
-        maps = _retrieve_mappings(fuseki, FORMAT_URIS['cff'],
-                                  FORMAT_URIS['umf'])
-        # create the collections, then call lines on each one
-        # for thread safety during lines and encode
-        cffc = CFFieldcodeMappings(maps)
-        fh.writelines(cffc.lines(fuseki))
-
-
-def build_grib_cf_map(fuseki, now, git_sha, base_dir):
-    """
-    Encode the GRIB/CF phenomenon translation mappings
-    within the specified file.
-
-    Args:
-    * fuseki:
-        The :class:`metarelate.fuseki.FusekiServer` instance.
-    * now:
-        Time stamp to write into the file.
-    * git_sha:
-        The git SHA1 of the metarelate commit.
-    * base_dir:
-        The root directory of the Iris source.
-
-    """
-    filename = os.path.join(base_dir, 'lib', 'iris', 'fileformats',
-                            'grib', '_grib_cf_map.py')
-    if not os.path.exists(os.path.dirname(filename)):
-        os.makedirs(os.path.dirname(filename))
-
-    # Create the file to contain GRIB/CF translations.
-    with open(filename, 'w') as fh:
-        fh.write(HEADER.format(year=YEAR, doc_string=DOC_STRING_GRIB,
-                               datestamp=now, git_sha=git_sha,
-                               name='iris-grib'))
-        fh.write(HEADER_GRIB)
-        fh.write('\n')
-
-        # Encode the relevant GRIB to CF translations.
-        maps = _retrieve_mappings(fuseki, FORMAT_URIS['gribm'],
-                                  FORMAT_URIS['cff'])
-        # create the collections, then call lines on each one
-        # for thread safety during lines and encode
-        g1cfc = GRIB1LocalParamCFConstrainedMappings(maps)
-        g1c = GRIB1LocalParamCFMappings(maps)
-        g2c = GRIB2ParamCFMappings(maps)
-        fh.writelines(g1cfc.lines(fuseki))
-        fh.writelines(g1c.lines(fuseki))
-        fh.writelines(g2c.lines(fuseki))
-
-        # Encode the relevant CF to GRIB translations.
-        maps = _retrieve_mappings(fuseki, FORMAT_URIS['cff'],
-                                  FORMAT_URIS['gribm'])
-        # create the collections, then call lines on each one
-        # for thread safety during lines and encode
-        cfcg1 = CFConstrainedGRIB1LocalParamMappings(maps)
-        cg1 = CFGRIB1LocalParamMappings(maps)
-        cg2 = CFGRIB2ParamMappings(maps)
-        fh.writelines(cfcg1.lines(fuseki))
-        fh.writelines(cg1.lines(fuseki))
-        fh.writelines(cg2.lines(fuseki))
-
-
-def main():
-    # Protect metarelate resource from 1.0 emergent bug
-    if not float(metarelate.__version__) >= 1.1:
-        raise ValueError("Please ensure that Metarelate Version is >= 1.1")
-    now = datetime.utcnow().strftime('%d %B %Y %H:%M')
-    git_sha = requests.get('http://www.metarelate.net/metOcean/latest_sha').text
-    gen_path = os.path.abspath(sys.modules['__main__'].__file__)
-    iris_path = os.path.dirname(os.path.dirname(gen_path))
-    with FusekiServer() as fuseki:
-        build_um_cf_map(fuseki, now, git_sha, iris_path)
-        build_grib_cf_map(fuseki, now, git_sha, iris_path)
-
-    if (git_sha !=
-            requests.get('http://www.metarelate.net/metOcean/latest_sha').text):
-        raise ValueError('The metarelate translation store has altered during '
-                         'your retrieval, the results may not be stable.\n'
-                         'Please rerun your retrieval.')
-
-if __name__ == '__main__':
-    main()
diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py
index 95dcce8171f..08bacbe1e07 100644
--- a/tools/generate_std_names.py
+++ b/tools/generate_std_names.py
@@ -13,7 +13,9 @@
 By default, Iris will use the source XML file:
     etc/cf-standard-name-table.xml as obtained from:
-    http://cf-pcmdi.llnl.gov/documents/cf-standard-names
+    http://cfconventions.org/standard-names.html
+    E.g. http://cfconventions.org/Data/cf-standard-names/78/src/cf-standard-name-table.xml
+    - N.B. no fixed 'latest' URL is provided.
""" diff --git a/tools/translator/__init__.py b/tools/translator/__init__.py deleted file mode 100644 index a83fee4edd8..00000000000 --- a/tools/translator/__init__.py +++ /dev/null @@ -1,1116 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Provides the framework to support the encoding of metarelate mapping -translations. - -""" - -from abc import ABCMeta, abstractmethod -from collections import deque, namedtuple -import copy -from queue import Queue -import re -from threading import Thread -import warnings - -from metarelate.fuseki import FusekiServer, WorkerThread, MAXTHREADS -import metarelate - -# known format identifier URIs -FORMAT_URIS = {'cff': '', - 'gribm': '', - 'umf': ''} - -CFName = namedtuple('CFName', 'standard_name long_name units') -DimensionCoordinate = namedtuple('DimensionCoordinate', - 'standard_name units points') -G1LocalParam = namedtuple('G1LocalParam', 'edition t2version centre iParam') -G2Param = namedtuple('G2Param', 'edition discipline category number') - - -class MappingEncodeWorker(WorkerThread): - """Worker thread class for handling EncodableMap instances""" - def dowork(self, resource): - resource.encode(self.fuseki_process) - - -class EncodableMap: - """ - A metarelate mapping able to encode itself as a string for use in Iris, - as defined by a translator Mappings subclass - - """ - def __init__(self, mapping, sourcemsg, targetmsg, sourceid, targetid): - """ - Args: - * mapping: - A :class:`metarelate.Mapping` instance representing a translation. - * sourcemsg: - The code snippet message for the source of the translation for - formatting - * targetmsg: - The code snippet message for the target of the translation for - formatting - * sourceid: - A dictionary of required key:value pairs required by the sourcemsg - * targetid: - A dictionary of required key:value pairs required by the targetmsg - - """ - self.mapping = mapping - self.sourcemsg = sourcemsg - self.targetmsg = targetmsg - self.sourceid = sourceid - self.targetid = targetid - self.encoding = None - - def encode(self, fuseki_process): - """ - Return a string of the Python source code required to represent an - entry in a dictionary mapping source to target. - - Args: - * fuseki_process: - A :class:`metarelate.fuseki.FusekiServer` instance. - - """ - sids, tids = self.mapping.get_identifiers(fuseki_process) - self.sourceid.update(sids) - self.targetid.update(tids) - self.encoding = '{}: {}'.format(self.sourcemsg.format(**self.sourceid), - self.targetmsg.format(**self.targetid)) - - -class Mappings(metaclass=ABCMeta): - """ - Abstract base class to support the encoding of specific metarelate - mapping translations. - - """ - - def __init__(self, mappings): - """ - Filter the given sequence of mappings for those member - :class:`metarelate.Mapping` translations containing a source - :class:`metarelate.Component` with a matching - :attribute:`Mapping.source_scheme` and a target - :class:`metarelate.Component` with a matching - :attribute:`Mapping.target_scheme`. - - Also see :method:`Mapping.valid_mapping` for further matching - criterion for candidate metarelate mapping translations. - - Args: - * mappings: - Iterator of :class:`metarelate.Mapping` instances. - - """ - temp = [] - # Filter the mappings for the required type of translations. 
-        for mapping in mappings:
-            source = mapping.source
-            target = mapping.target
-            sourcemsg, targetmsg = self.msg_strings()
-            sourceid, targetid = self.get_initial_id_nones()
-            if source.com_type == self.source_scheme and \
-                    target.com_type == self.target_scheme and \
-                    self.valid_mapping(mapping):
-                temp.append(EncodableMap(mapping, sourcemsg, targetmsg,
-                                         sourceid, targetid))
-        self.mappings = temp
-        if len(self) == 0:
-            msg = '{!r} contains no mappings.'
-            warnings.warn(msg.format(self.__class__.__name__))
-
-    def _sort_lines(self, payload):
-        """
-        Return the payload, unsorted.
-
-        """
-        return payload
-
-    def lines(self, fuseki_process):
-        """
-        Provides an iterator generating the encoded string representation
-        of each member of this metarelate mapping translation.
-
-        Returns:
-            An iterator of strings.
-
-        """
-        msg = '\tGenerating phenomenon translation {!r}.'
-        print(msg.format(self.mapping_name))
-        lines = ['\n%s = {\n' % self.mapping_name]
-        # Retrieve encodings for the collection of mapping instances.
-        # Retrieval is threaded as it is heavily bound by resource resolution
-        # over http.
-        # Queue for metarelate mapping instances
-        mapenc_queue = Queue()
-        for mapping in self.mappings:
-            mapenc_queue.put(mapping)
-        # deque to contain the results of the jobs processed from the queue
-        mapencs = deque()
-        # run worker threads
-        for i in range(MAXTHREADS):
-            MappingEncodeWorker(mapenc_queue, mapencs, fuseki_process).start()
-        # block progress until the queue is empty
-        mapenc_queue.join()
-        # end of threaded retrieval process.
-
-        # now sort the payload
-        payload = [mapenc.encoding for mapenc in mapencs]
-        payload.sort(key=self._key)
-        lines.extend(payload)
-        lines.append('    }\n')
-        return iter(lines)
-
-    def __len__(self):
-        return len(self.mappings)
-
-    def _key(self, line):
-        """Method to provide the sort key of the mappings order."""
-        return line
-
-    @property
-    @abstractmethod
-    def mapping_name(self):
-        """
-        Abstract property that specifies the name of the dictionary
-        to contain the encoding of this metarelate mapping translation.
-
-        """
-
-    @property
-    @abstractmethod
-    def source_scheme(self):
-        """
-        Abstract property that specifies the name of the scheme for
-        the source :class:`metarelate.Component` defining this metarelate
-        mapping translation.
-
-        """
-
-    @property
-    @abstractmethod
-    def target_scheme(self):
-        """
-        Abstract property that specifies the name of the scheme for
-        the target :class:`metarelate.Component` defining this metarelate
-        mapping translation.
-
-        """
-
-    @abstractmethod
-    def valid_mapping(self, mapping):
-        """
-        Abstract method that determines whether the provided
-        :class:`metarelate.Mapping` is a translation from the required
-        source :class:`metarelate.Component` to the required target
-        :class:`metarelate.Component`.
-
-        """
-
-    def get_initial_id_nones(self):
-        """
-        Return the identifier items which may not exist in the translation
-        database, but are needed for a msg_string. These must exist, even
-        if not written from the database.
-
-        Returns two dictionaries to use as the start point for
-        population from the database.
-
-        """
-        sourceid = {}
-        targetid = {}
-        return sourceid, targetid
-
-    def is_cf(self, comp):
-        """
-        Determines whether the provided component from a mapping
-        represents a simple CF component of the given kind.
-
-        Args:
-        * comp:
-            A :class:`metarelate.Component` instance.
-
-        Returns:
-            Boolean.
- - """ - kind = FORMAT_URIS['cff'] - result = False - result = hasattr(comp, 'com_type') and \ - comp.com_type == kind and \ - hasattr(comp, 'units') and \ - len(comp) in [1, 2] - return result - - def is_cf_constrained(self, comp): - """ - Determines whether the provided component from a mapping - represents a compound CF component for a phenomenon and - one, single valued dimension coordinate. - - Args: - * component: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - ftype = FORMAT_URIS['cff'] - result = False - cffield = hasattr(comp, 'com_type') and comp.com_type == ftype and \ - hasattr(comp, 'units') and (hasattr(comp, 'standard_name') or - hasattr(comp, 'long_name')) - dimcoord = hasattr(comp, 'dim_coord') and \ - isinstance(comp.dim_coord, metarelate.ComponentProperty) and \ - comp.dim_coord.component.com_type.notation == 'DimCoord' - result = cffield and dimcoord - return result - - def is_cf_height_constrained(self, comp): - item_sn = metarelate.Item((''), - 'standard_name') - item_h = metarelate.Item((''), - 'height') - snprop = metarelate.StatementProperty(item_sn, item_h) - item_u = metarelate.Item((''), - 'units') - uprop = metarelate.StatementProperty(item_u, - metarelate.Item('"m"', 'm')) - pts_pred = metarelate.Item((''), - 'points') - result = False - if self.is_cf_constrained(comp): - props = comp.dim_coord.component.properties - if len(props) == 3: - if snprop in props and uprop in props: - preds = [prop.predicate for prop in props] - if pts_pred in preds: - result = True - return result - - def is_fieldcode(self, component): - """ - Determines whether the provided concept from a mapping - represents a simple UM concept for a field-code. - - Args: - * concept: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - result = False - result = hasattr(component, 'lbfc') and len(component) == 1 - return result - - def is_grib1_local_param(self, component): - """ - Determines whether the provided component from a mapping - represents a simple GRIB edition 1 component for a local - parameter. - - Args: - * component: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - result = len(component) == 1 and hasattr(component, 'grib1_parameter') - return result - - def is_grib2_param(self, component): - """ - Determines whether the provided component from a mapping - represents a simple GRIB edition 2 component for a parameter. - - Args: - * component: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - - result = len(component) == 1 and hasattr(component, 'grib2_parameter') - return result - - def is_stash(self, component): - """ - Determines whether the provided concept for a mapping - represents a simple UM concept for a stash-code. - - Args: - * concept: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - result = False - result = hasattr(component, 'stash') and len(component) == 1 - return result - - -def _cfn(line): - """ - Helper function to parse dictionary lines using the CFName named tuple. 
- Matches to the line ' CFName({standard_name}, {long_name}, {units}:*) - giving access to these named parts - - """ - match = re.match('^ CFName\((.+), (.+), (.+)\):.+,', line) - if match is None: - raise ValueError('encoding not sortable') - standard_name, long_name, units = match.groups() - if standard_name == 'None': - standard_name = None - if long_name == 'None': - long_name = None - return [standard_name, long_name, units] - - -class CFFieldcodeMappings(Mappings): - """ - Represents a container for CF phenomenon to UM field-code metarelate - mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF standard name, long name, - and units to UM field-code. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return _cfn(line) - - def msg_strings(self): - return (' CFName({standard_name!r}, {long_name!r}, ' - '{units!r})', - '{lbfc},\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_TO_LBFC' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF to UM field-code translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf(mapping.source) and self.is_fieldcode(mapping.target) - - -class FieldcodeCFMappings(Mappings): - """ - Represents a container for UM field-code to CF phenomenon metarelate - mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from UM field-code to - CF standard name, long name, and units. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return int(line.split(':')[0].strip()) - - def msg_strings(self): - return (' {lbfc}', - 'CFName({standard_name!r}, {long_name!r}, {units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'LBFC_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. 
- - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - UM field-code to CF translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_fieldcode(mapping.source) and self.is_cf(mapping.target) - - -class StashCFNameMappings(Mappings): - """ - Represents a container for UM stash-code to CF phenomenon metarelate - mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from UM stash-code to CF - standard name, long name, and units. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return(' {stash!r}', - 'CFName({standard_name!r}, ' - '{long_name!r}, {units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'STASH_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - UM stash-code to CF translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return (self.is_stash(mapping.source) and - (self.is_cf(mapping.target) or - self.is_cf_constrained(mapping.target))) - - -class StashCFHeightConstraintMappings(Mappings): - """ - Represents a container for UM stash-code to CF phenomenon metarelate - mapping translations where a singular height constraint is defined by - the STASH code. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from UM stash-code to CF - standard name, long name, and units. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return(' {stash!r}', - '{dim_coord[points]},\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'STASHCODE_IMPLIED_HEIGHTS' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. 
- - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - UM stash-code to CF translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return (self.is_stash(mapping.source) and - self.is_cf_height_constrained(mapping.target)) - - -class GRIB1LocalParamCFMappings(Mappings): - """ - Represents a container for GRIB (edition 1) local parameter to - CF phenomenon metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from GRIB1 edition, table II version, - centre and indicator of parameter to CF standard name, long name and units. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - matchstr = ('^ G1LocalParam\(([0-9]+), ([0-9]+), ' - '([0-9]+), ([0-9]+)\):.*') - match = re.match(matchstr, line) - if match is None: - raise ValueError('encoding not sortable') - return [int(i) for i in match.groups()] - - def msg_strings(self): - return (' G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter})', - 'CFName({standard_name!r}, ' - '{long_name!r}, {units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'GRIB1_LOCAL_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - GRIB1 local parameter to CF phenomenon translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_grib1_local_param(mapping.source) and \ - self.is_cf(mapping.target) - - -class CFGRIB1LocalParamMappings(Mappings): - """ - Represents a container for CF phenomenon to GRIB (edition 1) local - parameter metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF standard name, long name - and units to GRIB1 edition, table II version, centre and indicator of - parameter. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return _cfn(line) - - def msg_strings(self): - return (' CFName({standard_name!r}, {long_name!r}, ' - '{units!r})', - 'G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter}),\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. 
- - """ - return 'CF_TO_GRIB1_LOCAL' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF phenomenon to GRIB1 local parameter translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf(mapping.source) and \ - self.is_grib1_local_param(mapping.target) - - -class GRIB1LocalParamCFConstrainedMappings(Mappings): - """ - Represents a container for GRIB (edition 1) local parameter to - CF phenomenon and dimension coordinate constraint metarelate mapping - translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from GRIB1 edition, table II version, - centre and indicator of parameter to CF phenomenon standard name, long name - and units, and CF dimension coordinate standard name, units and points. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return (' G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter})', - '(CFName({standard_name!r}, ' - '{long_name!r}, {units!r}), ' - 'DimensionCoordinate({dim_coord[standard_name]!r}, ' - '{dim_coord[units]!r}, {dim_coord[points]})),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'GRIB1_LOCAL_TO_CF_CONSTRAINED' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - GRIB1 local parameter to CF phenomenon and dimension coordinate - translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_grib1_local_param(mapping.source) and \ - self.is_cf_constrained(mapping.target) - - -class CFConstrainedGRIB1LocalParamMappings(Mappings): - """ - Represents a container for CF phenomenon and dimension coordinate - constraint to GRIB (edition 1) local parameter metarelate mapping - translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF phenomenon standard name, - long name and units, and CF dimension coordinate standard name, units and - points to GRIB1 edition, table II version, centre and indicator of - parameter. 
- - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return (' (CFName({standard_name!r}, ' - '{long_name!r}, {units!r}), ' - 'DimensionCoordinate({dim_coord[standard_name]!r}, ' - '{dim_coord[units]!r}, {dim_coord[points]}))', - 'G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter}),\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_CONSTRAINED_TO_GRIB1_LOCAL' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF phenomenon and dimension coordinate to GRIB1 local parameter - translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf_constrained(mapping.source) and \ - self.is_grib1_local_param(mapping.target) - - -class GRIB2ParamCFMappings(Mappings): - """ - Represents a container for GRIB (edition 2) parameter to CF phenomenon - metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from GRIB2 edition, discipline, - parameter category and indicator of parameter to CF standard name, - long name and units. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - matchstr = ('^ G2Param\(([0-9]+), ([0-9]+), ([0-9]+), ' - '([0-9]+)\):.*') - match = re.match(matchstr, line) - if match is None: - raise ValueError('encoding not sortable') - return [int(i) for i in match.groups()] - - def msg_strings(self): - return (' G2Param({editionNumber}, {discipline}, ' - '{parameterCategory}, {parameterNumber})', - 'CFName({standard_name!r}, {long_name!r}, ' - '{units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'GRIB2_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - GRIB2 parameter to CF phenomenon translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. 
- - """ - return self.is_grib2_param(mapping.source) and \ - self.is_cf(mapping.target) - - -class CFGRIB2ParamMappings(Mappings): - """ - Represents a container for CF phenomenon to GRIB (edition 2) parameter - metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF standard name, long name - and units to GRIB2 edition, discipline, parameter category and indicator - of parameter. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return _cfn(line) - - def msg_strings(self): - return (' CFName({standard_name!r}, {long_name!r}, ' - '{units!r})', - 'G2Param({editionNumber}, {discipline}, ' - '{parameterCategory}, {parameterNumber}),\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_TO_GRIB2' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF phenomenon to GRIB2 parameter translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf(mapping.source) and \ - self.is_grib2_param(mapping.target)