diff --git a/.cirrus.yml b/.cirrus.yml deleted file mode 100644 index 92b8d788e6..0000000000 --- a/.cirrus.yml +++ /dev/null @@ -1,198 +0,0 @@ -# Reference: -# - https://cirrus-ci.org/guide/writing-tasks/ -# - https://cirrus-ci.org/guide/writing-tasks/#environment-variables -# - https://cirrus-ci.org/guide/tips-and-tricks/#sharing-configuration-between-tasks -# - https://cirrus-ci.org/guide/linux/ -# - https://hub.docker.com/_/gcc/ -# - https://hub.docker.com/_/python/ - -# -# Global defaults. -# -container: - image: gcc:latest - cpu: 2 - memory: 4G - - -env: - # Skip specific tasks by name. Set to a non-empty string to skip. - SKIP_LINT_TASK: "" - SKIP_TEST_TASK: "" - SKIP_DOCTEST_TASK: "" - SKIP_LINKCHECK_TASK: "" - # Skip task groups by type. Set to a non-empty string to skip. - SKIP_ALL_DOC_TASKS: "" - # Maximum cache period (in weeks) before forcing a new cache upload. - CACHE_PERIOD: "2" - # Increment the build number to force new cartopy cache upload. - CARTOPY_CACHE_BUILD: "3" - # Increment the build number to force new conda cache upload. - CONDA_CACHE_BUILD: "0" - # Increment the build number to force new nox cache upload. - NOX_CACHE_BUILD: "0" - # Increment the build number to force new pip cache upload. - PIP_CACHE_BUILD: "0" - # Pip packages to be upgraded/installed. - PIP_CACHE_PACKAGES: "nox pip pyyaml setuptools wheel" - # Conda packages to be installed. - CONDA_CACHE_PACKAGES: "nox pip" - # Git commit hash for iris test data. - IRIS_TEST_DATA_VERSION: "2.5" - # Base directory for the iris-test-data. - IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data - - -# -# YAML alias for common linux test infra-structure. -# -linux_task_template: &LINUX_TASK_TEMPLATE - auto_cancellation: true - env: - PATH: ${HOME}/miniconda/bin:${PATH} - SITE_CFG: ${CIRRUS_WORKING_DIR}/lib/iris/etc/site.cfg - conda_cache: - folder: ${HOME}/miniconda - fingerprint_script: - - wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh - - echo "${CIRRUS_OS} $(sha256sum miniconda.sh)" - - echo "${CONDA_CACHE_PACKAGES}" - - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CONDA_CACHE_BUILD}" - - uname -r - populate_script: - - bash miniconda.sh -b -p ${HOME}/miniconda - - conda config --set always_yes yes --set changeps1 no - - conda config --set show_channel_urls True - - conda config --add channels conda-forge - - conda update --quiet --name base conda - - conda install --quiet --name base ${CONDA_CACHE_PACKAGES} - cartopy_cache: - folder: ${HOME}/.local/share/cartopy - fingerprint_script: - - echo "${CIRRUS_OS}" - - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CARTOPY_CACHE_BUILD}" - populate_script: - - conda create --quiet --name cartopy-cache cartopy - - source ${HOME}/miniconda/etc/profile.d/conda.sh >/dev/null 2>&1 - - conda activate cartopy-cache >/dev/null 2>&1 - - cd $(mktemp -d) - - wget --quiet https://raw.githubusercontent.com/SciTools/cartopy/v0.20.0/tools/cartopy_feature_download.py - - python cartopy_feature_download.py physical --output ${HOME}/.local/share/cartopy --no-warn - - conda deactivate >/dev/null 2>&1 - nox_cache: - folder: ${CIRRUS_WORKING_DIR}/.nox - reupload_on_changes: true - fingerprint_script: - - echo "${CIRRUS_TASK_NAME}" - - echo "${NOX_CACHE_BUILD}" - - -# -# YAML alias for compute credits. -# -compute_credits_template: &CREDITS_TEMPLATE - # Restrict where compute credits are used. 
- use_compute_credits: ${CIRRUS_REPO_FULL_NAME} == "SciTools/iris" && ${CIRRUS_USER_COLLABORATOR} == "true" && ${CIRRUS_PR_DRAFT} == "false" && ${CIRRUS_PR} != "" - - -# -# YAML alias for the iris-test-data cache. -# -iris_test_data_template: &IRIS_TEST_DATA_TEMPLATE - data_cache: - folder: ${IRIS_TEST_DATA_DIR} - fingerprint_script: - - echo "iris-test-data v${IRIS_TEST_DATA_VERSION}" - populate_script: - - wget --quiet https://github.com/SciTools/iris-test-data/archive/v${IRIS_TEST_DATA_VERSION}.zip -O iris-test-data.zip - - unzip -q iris-test-data.zip - - mv iris-test-data-${IRIS_TEST_DATA_VERSION} ${IRIS_TEST_DATA_DIR} - - -# -# Linting -# -task: - only_if: ${SKIP_LINT_TASK} == "" - << : *CREDITS_TEMPLATE - auto_cancellation: true - container: - image: python:3.8 - cpu: 2 - memory: 4G - name: "${CIRRUS_OS}: pre-commit hooks" - pip_cache: - folder: ~/.cache/pip - fingerprint_script: - - echo "${CIRRUS_TASK_NAME} py${PYTHON_VERSION}" - - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${PIP_CACHE_BUILD} ${PIP_CACHE_PACKAGES}" - precommit_script: - - pip list - - python -m pip install --retries 3 --upgrade ${PIP_CACHE_PACKAGES} - - pip list - - nox --session precommit - - -# -# Testing (Linux) -# -task: - only_if: ${SKIP_TEST_TASK} == "" - << : *CREDITS_TEMPLATE - matrix: - env: - PY_VER: 3.8 - name: "${CIRRUS_OS}: py${PY_VER} tests" - container: - image: gcc:latest - cpu: 6 - memory: 8G - << : *IRIS_TEST_DATA_TEMPLATE - << : *LINUX_TASK_TEMPLATE - tests_script: - - echo "[Resources]" > ${SITE_CFG} - - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} - - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} - - nox --session tests -- --verbose - - -# -# Documentation Testing and Gallery (Linux) -# -task: - only_if: ${SKIP_DOCTEST_TASK} == "" && ${SKIP_ALL_DOC_TASKS} == "" - << : *CREDITS_TEMPLATE - env: - PY_VER: 3.8 - MPL_RC_DIR: ${HOME}/.config/matplotlib - MPL_RC_FILE: ${HOME}/.config/matplotlib/matplotlibrc - name: "${CIRRUS_OS}: py${PY_VER} doctests and gallery" - << : *IRIS_TEST_DATA_TEMPLATE - << : *LINUX_TASK_TEMPLATE - tests_script: - - echo "[Resources]" > ${SITE_CFG} - - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} - - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} - - mkdir -p ${MPL_RC_DIR} - - echo "backend : agg" > ${MPL_RC_FILE} - - echo "image.cmap : viridis" >> ${MPL_RC_FILE} - - nox --session doctest -- --verbose - - -# -# Documentation Link Check (Linux) -# -task: - only_if: ${SKIP_LINKCHECK_TASK} == "" && ${SKIP_ALL_DOC_TASKS} == "" - << : *CREDITS_TEMPLATE - env: - PY_VER: 3.8 - MPL_RC_DIR: ${HOME}/.config/matplotlib - MPL_RC_FILE: ${HOME}/.config/matplotlib/matplotlibrc - name: "${CIRRUS_OS}: py${PY_VER} link check" - << : *LINUX_TASK_TEMPLATE - tests_script: - - mkdir -p ${MPL_RC_DIR} - - echo "backend : agg" > ${MPL_RC_FILE} - - echo "image.cmap : viridis" >> ${MPL_RC_FILE} - - nox --session linkcheck -- --verbose diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000000..3994ec0a83 --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true)$ +ref-names: $Format:%D$ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..82bf71c1c5 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +.git_archival.txt export-subst \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index 5f65470c82..134b6ff8da 
100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -2,7 +2,7 @@ name: "\U0001F41B Bug Report" about: Submit a bug report to help us improve Iris title: '' -labels: 'New: Issue, Type: Bug' +labels: 'Type: Bug' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 500a2183d2..b5f75cc6f8 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -4,3 +4,6 @@ contact_links: - name: 💬 Iris GitHub Discussions url: https://github.com/SciTools/iris/discussions about: Engage with the Iris community to discuss your issue + - name: ❓ Usage Question + url: https://github.com/SciTools/iris/discussions/categories/q-a + about: Raise a question about how to use Iris in the Q&A section of Discussions. diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/.github/ISSUE_TEMPLATE/documentation.md index 8caa62a1c7..01eb2a6734 100644 --- a/.github/ISSUE_TEMPLATE/documentation.md +++ b/.github/ISSUE_TEMPLATE/documentation.md @@ -2,7 +2,7 @@ name: "\U0001F4DA Documentation" about: Report an issue with the Iris documentation title: '' -labels: 'New: Documentation, Type: Documentation' +labels: 'Type: Documentation' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md index b17b6066e4..2f66321405 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -2,7 +2,6 @@ name: "✨ Feature Request" about: Submit a request for a new feature in Iris title: '' -labels: 'New: Feature' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md index e66042609c..63de163743 100644 --- a/.github/ISSUE_TEMPLATE/issue.md +++ b/.github/ISSUE_TEMPLATE/issue.md @@ -2,7 +2,6 @@ name: "\U0001F4F0 Custom Issue" about: Submit a generic issue to help us improve Iris title: '' -labels: 'New: Issue' assignees: '' --- diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..e9b45d116a --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# Reference: +# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot +# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" + labels: + - "New: Pull Request" + - "Bot" diff --git a/.github/deploy_key.scitools-docs.enc b/.github/deploy_key.scitools-docs.enc deleted file mode 100644 index 165a7c1970..0000000000 --- a/.github/deploy_key.scitools-docs.enc +++ /dev/null @@ -1 +0,0 @@
-gAAAAABZSMeGIlHxHu4oCV_h8shbCRf1qJYoLO9Z0q9uKRDTlytoigzlvfxhN-9WMjc3Js1f1Zg55PfEpTOpL82p6QHF-gqW0k0qGjanO3lnQzM6EzIu3KyJPrVrL-O6edwoPMYKqwsNO3VQHNuEspsFKY0TbjnTPHc45SPU5LjEGX4c_SADSDcLDJm2rbrU2eVkT-gFHy_-ZzK0Di83WlDc79YzIkVe5BAn5PbWv3O9BROR4fJzecbjmWRT_rp1cqI_gaUpVcwTdRK3II9YnazBtW4h2WbCeTcySLD7N4o9K0P71SR6gG_XFbpML3Haf5IUdRi0qPBuvJ_4YVnnuJo6mhiIOJfUEcNj_bbLOYVzPmKyQMHvrPf_lK5JhdX6MUvqluhqHuc0i_z_j1O2y32lB7b1iiY6eE_BsNlXJHlOX1GiXkX0nZLI48p-D22jya44WshWSnVcoalcCDkdbvdFbpOscwXDR3nB-PCOmRUF_d1BlMbp1if-VP0yt3tJ_5yyCrqSRWwFusaibQTF6yoImetl7Am95hh2FjFDNkalHqtarnUv86w-26v1ukcTIjJ0iHzNbCK1m0VMkvE6uDeqRgIZnVKON5cesmM3YbulRrHpaOiSly_sMhLhfg5jTxAuOa319AQGoHEOcRLRUYdw2TQkDEiHGiUh_U4-nC7GTGDGcXyeBIa4ciuC2Qi0QXf9qyEGoIRcU8BP34LDNdtovJoZOBDzhr5Ajnu7yA3GB3TD_kiZrgm6agFuu7a51OMfjezhwGzUJ4X-empPctwm9woOJmPCTFqCvxB2VwVV0L6yngsTooyAHCi5st_AG-p5FIT3VZGx7EgCd68ze9XlRoACoe9XOdSFklbaSMGRbJlvKCPAA0zj4__PfIhlD8Cxwwjq_VXlSr_QxygIGZJlhkT46P9TroolgdipaBp1aQ3_PKHfgw5Y9ZqBKCZF5DOJejqUbfVKUp2JdqoX3yQBD0ByQFdfCuLvoiYcM2ofKdIMvel3Jwn0Nx4NYR2qg3h7FYti0jdrNlC89gnL4tKsf0DAGxZ1UYmqQMWJ3-GKCKrlKyeaHYB2djPRGP8VeoRZh_UorSNHU56KSztK_hTP6P0nFymRJRUSRBMKTaTfJf1aBlk9zJHSe9hOKwxyUNkwcTftGn5P0WNcnaTk3ecTVe-1QJKbPWwMBDzqQtTCsCizgN4UdQsmy4iMYq-LT2TC-JXXo0CPTNDybUj92wSa7KeKTvKnbN8DMZbGRdgy5BOSGw4hMIoIFSB-6tnBIvTntNfMT9ac9e9jKm47Q4qXpaeF3AsvBqxkMRQLaYVppPng6cA49VjJQDZ0gTdPKSSKZkApfeeQL0LLCGwzQ4C52TWK2NJSQ3pvRYI1F0taDQWopIiwFfox-OSYnOJECHkHjxaxhHQzVb3w47xKKZNXbLb-LV7QI-kGuKLfoqO1lq94cw1H-EVrXaGJcDDLjK2jRgdVfDyPsHMcW1oUDJqu8gQ6fCXYPbqJzdmFNFsc1hywHWCU7crV61D2QubwzbLRnP8053MvsMnbdhWtwocTlvvdG-qW6CiEA9Eanfpf0RW1W9oh6yQJ__0vS9UWswqq5ahkkpHY9LTE0US4L3xbFOrq7HgbA2jelTdPVfxo3BfUHuL8oKpFDTzgZi07gNmkhIZfpuXj2KFnm9XM31AsY6V2rXL0xSx-9rvi4FP0LK6V5vQ8OKI8aRPCDyzLUv2xnayMW4yaYg3GHD5yo7pIOswKc6GOEmetPnay3j0dVN3hfpkpfJWhss3vjZ2Zl0NmjJ7OuS25tjUGLy82A1yFSpL8mKRkHZJuMDZbd_Or6gaPVoVT_Otbkh-6pMZuDeOHOUfgey0Z374jCjRpyQ9k-Fpw8ykow8iIIQ088kC5CeQy6jRhD7mO3iR4-U1XKDJQNlNg1z_JYyDrwykp7FFN2sQn7RRYHIXx2iMrEDXdrdTrujMFN6omC13yDuXJukAgZb6zBBUTlonxRUBjUJWt2P-1sRRTsG8mr9EaE5K-xhR5Ust_37L3svNQ0vwLtPLIpWGZHhD8P_dYNR2RL4679xyzI8A7wLY82wFBHrcghAd4UtLJH9ul6IuS_CaVo-gbfowNRaQ0Zw7WHZGIXpZWEx1_zck6qDEaCY8TpQeciBWpH5uJDSYqdLdMwigdQEGzAJ1DHSWsyTrmOR7Lhwi9WqOzfWe4ahxAkAUH_Jdr_i-nGfl_x3OgQdHM7jWVMXDcXEmR0bkw-s0EKXCn20q2bxDkm5SUWkYtWAZ2aZRgo4wHOqGBcP99xZ25mq9uxtNOkLBF81lnVbn_4BAZBNnnKwwj4SafeIW4KR1ZOpnEI47sGUR6NhEk9VtJsv0zeZIv8VjRbNLh3QCxkNMue60SjJ48kjotZSX1RQJN0xwPftiABBf8MX9tyZe8emQvPeIcdQTSQPnYEUx22xZGeeJTNrZ9soQyP6mrkkRihp6o9tG7HT9QEVLGM19wAigwAAMMXGqdGzWwpar30JtJU94gAmIlwFUJqeO_fdJKFspnUyJ6gt5_oHsKNEV7Uz5EJwGpa94tlPJXjvZpu-wWQfu8U0trTU2mTCA0bmZIDID-Xk4vCW_SD4OVnsvWyga4QHSg3AqVTjnjlapAjsYcFjiOo2C_U3besloprpyuAwpTdn7zdfMHIJO0ckBFnXlk8XB3kT0YGrCpBvW6gYMXlnePVcr3wJehCvMg1Q9Dc5fVQUqt65zcjbgiudfzFGtTe9T4f1IttoAtrJgTN4W1mtbZzSK864I_ngaX5YWgZSinjkbocCCFEJDcbiXMnV7OWOZefqW6VZu4BZKEKlN9k2kH3UCECCK3uRAQIPn_48DgaVnAff2-fMADltiosSPJ_a3057acJP0cf-1QsJuV7r3zdzL3shgrMRjpSsSTCYdMhZ6disFGcJg7hJJvtH1FieZ76jps5FYi5lE8Ua9yBKlG4dCGuUBnikvpfy2FLMLFNn-iXLflu2oiBbcLvn_ReZUnFIR6KgGRN8xKEBaXATQVtb2E678GtQptK8PHP2DoAtbsIXUDn60YH04D9pEck8NnmWYAz7sWbiL6OKdaO7jQep4mt3CgkyFC0NCKP9zCbVNtmfHRVmHtckjgfHF-tK_v59KeAuwWPtm7ow2BjynAK42IGR9nWtQFRUZIboaND8UF76YGKFF7kOf_XTvoNrVTCRkD6b8KJy2IFfdoHP6WET9QLvwDSXgYLPlCX9z7aQ_lc57u5d_dGO-7NZ_Qbs69ByyIvQoztVBjw6fa7EzSwccqPfMQL_fiecNCng-r4gHaH6TlgSbfqQOISHxTtvmbym1no560ZsHfnQfuL6BCI8s6OoygxhOnQhaDqyOUVBut_x3VR_DKFMyUazXYNgLbRsdITaAvR-0gIx5TAX9n3A4HwHuiBZCtwRYaiJnW8FX9lk1Y_g5UHL2OC3rsNFui3aBLzAFhx58lALxnxhlUItuHHK9BgexnR2yCj2nOWLoWQzfFaf2_fpjEh_QBHTqUxdQZ8ighg_8lh6hmLbW4PcUxKX71RF
mikLyS3-idlzsiEomNlPNaVllRF21vE6dR-nZ6xsxzTvNB4wumP2irQ9mFBTN1WpiLMyNoEEucA2I848YHUfkZrjTG_dcCQNp7H_2gKdIsZ135lUEG6lYfhLMHTmP5uYxxx3Pipjp6wF2GFCsZPIlIPsgrhbSxqkWg1EOViHtpw6ypFKn7wQHHfnrnHkFWnrKbMARVBjJUB-FhK4b6qLU_k_MTMipemneMUFXlj3EkEhKM18MIHGkIOkwG5QtPYcjUAf_2sZlxSMVnh6sQ8kVwF6lfk_l8jhoO93HUTntZUSv7GrE3s80yJgII4Qw37AdgcJiAkoPn1-17HfSsAy6uRh5-OvrCtkDqQxfuJSyn_4pRMh6hZT7N9pI5limMXXn2nHnxU93UT3qU-smA8q0ECfvK3JwoaYy_llSx0wSBvpmxjLQ302sFYM5FVZ9zRbHuLCCZShVopiyMDLHVJe_1g9Ou1KL-h6RVZgg3Ttyb5m2KDfoHEVLeZkW81YLCsyo7uNb6SVRM-615TIVGT6Eq7oJ6wO2LMDKjEpHKFiOFpY2fpR8noM81UqgLddYfl_lei7RVjaNO98otqE4iSNtpgJgyhAx4CdYm__yQRSXhckR4K7yAhM9Kh5BLbQQnf2_0WS1sWTmNMZZNMfOSqmTCRVwcYvg4TDGOA-vZARbZW1M7npVMldV_SbvgcEZD6InY9c40eheRqS0YD2W2HEZIiNeLRw0y5WBcYuJIpXhI3ViTXx-frJnv0Mo9uwmuLbJmWFcn6RdIVcU68_oPZZlZD4Vm7SjikbuZKF1BF3lXamTTDIBcWiDLwuNDv2lUkURDCWa5WJsfUCfTAJ6PTe8= \ No newline at end of file diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index b489eba036..b197b58e80 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -1,10 +1,11 @@ -# This is a basic workflow to help you get started with Actions +# Use ASV to check for performance regressions in the last 24 hours' commits. name: benchmark-check on: - # Triggers the workflow on push or pull request events but only for the master branch - pull_request: + schedule: + # Runs every day at 23:00. + - cron: "0 23 * * *" jobs: benchmark: @@ -14,41 +15,35 @@ jobs: env: IRIS_TEST_DATA_LOC_PATH: benchmarks IRIS_TEST_DATA_PATH: benchmarks/iris-test-data - IRIS_TEST_DATA_VERSION: "2.5" + IRIS_TEST_DATA_VERSION: "2.15" # Lets us manually bump the cache to rebuild + ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" + PY_VER: 3.8 steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - - name: Fetch the PR base branch too - run: | - git fetch --depth=1 origin ${{ github.event.pull_request.base.ref }} - git branch _base FETCH_HEAD - echo PR_BASE_SHA=$(git rev-parse _base) >> $GITHUB_ENV + - uses: actions/checkout@v3 + with: + fetch-depth: 0 - name: Install Nox run: | pip install nox - - name: Cache .nox and .asv/env directories + - name: Cache environment directories id: cache-env-dir - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | .nox benchmarks/.asv/env - # Make sure GHA never gets an exact cache match by using the unique - # github.sha. This means it will always store this run as a new - # cache (Nox may have made relevant changes during run). Cache - # restoration still succeeds via the partial restore-key match. 
- key: ${{ runner.os }}-${{ github.sha }} - restore-keys: ${{ runner.os }} + $CONDA/pkgs + key: ${{ runner.os }}-${{ hashFiles('requirements/') }}-${{ env.ENV_CACHE_BUILD }} - name: Cache test data directory id: cache-test-data - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ${{ env.IRIS_TEST_DATA_PATH }} @@ -62,22 +57,69 @@ jobs: unzip -q iris-test-data.zip mkdir --parents ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_LOC_PATH} mv iris-test-data-${IRIS_TEST_DATA_VERSION} ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH} - + - name: Set test data var run: | echo "OVERRIDE_TEST_DATA_REPOSITORY=${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}/test_data" >> $GITHUB_ENV - - name: Run CI benchmarks + - name: Run overnight benchmarks + run: | + first_commit=$(git log --after="$(date -d "1 day ago" +"%Y-%m-%d") 23:00:00" --pretty=format:"%h" | tail -n 1) + if [ "$first_commit" != "" ] + then + nox --session="benchmarks(overnight)" -- $first_commit + fi + + - name: Create issues for performance shifts + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - mkdir --parents benchmarks/.asv - set -o pipefail - nox --session="benchmarks(ci compare)" | tee benchmarks/.asv/ci_compare.txt + if [ -d benchmarks/.asv/performance-shifts ] + then + cd benchmarks/.asv/performance-shifts + for commit_file in * + do + commit="${commit_file%.*}" + pr_number=$(git log "$commit"^! --oneline | grep -o "#[0-9]*" | tail -1 | cut -c 2-) + author=$(gh pr view $pr_number --json author -q '.["author"]["login"]' --repo $GITHUB_REPOSITORY) + merger=$(gh pr view $pr_number --json mergedBy -q '.["mergedBy"]["login"]' --repo $GITHUB_REPOSITORY) + # Find a valid assignee from author/merger/nothing. + if curl -s https://api.github.com/users/$author | grep -q '"type": "User"'; then + assignee=$author + elif curl -s https://api.github.com/users/$merger | grep -q '"type": "User"'; then + assignee=$merger + else + assignee="" + fi + title="Performance Shift(s): \`$commit\`" + body=" + Benchmark comparison has identified performance shifts at + + * commit $commit (#$pr_number). + + Please review the report below and \ + take corrective/congratulatory action as appropriate \ + :slightly_smiling_face: + + <details>
+ <summary>Performance shift report</summary> + + \`\`\` + $(cat $commit_file) + \`\`\` + + </details>
+ + Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}) + " + gh issue create --title "$title" --body "$body" --assignee $assignee --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY + done + fi - name: Archive asv results if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: asv-report path: | benchmarks/.asv/results - benchmarks/.asv/ci_compare.txt diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml new file mode 100644 index 0000000000..43bacd3ec5 --- /dev/null +++ b/.github/workflows/ci-tests.yml @@ -0,0 +1,136 @@ +# reference: +# - https://github.com/actions/cache +# - https://github.com/actions/checkout +# - https://github.com/marketplace/actions/setup-miniconda + +name: ci-tests + +on: + push: + branches: + - "main" + - "v*x" + tags: + - "v*" + pull_request: + branches: + - "*" + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + tests: + name: "${{ matrix.session }} (py${{ matrix.python-version }} ${{ matrix.os }})" + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: bash -l {0} + + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest"] + python-version: ["3.10"] + session: ["tests", "doctest", "gallery", "linkcheck"] + include: + - os: "ubuntu-latest" + python-version: "3.9" + session: "tests" + - os: "ubuntu-latest" + python-version: "3.8" + session: "tests" + + env: + IRIS_TEST_DATA_VERSION: "2.16" + ENV_NAME: "ci-tests" + + steps: + - name: "checkout" + uses: actions/checkout@v3 + + - name: "environment configure" + env: + # Maximum cache period (in weeks) before forcing a cache refresh. + CACHE_WEEKS: 2 + run: | + echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV} + echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} + + - name: "data cache" + uses: ./.github/workflows/composite/iris-data-cache + with: + cache_build: 0 + env_name: ${{ env.ENV_NAME }} + version: ${{ env.IRIS_TEST_DATA_VERSION }} + + - name: "conda package cache" + uses: ./.github/workflows/composite/conda-pkg-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + + - name: "conda install" + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-version: latest + channels: conda-forge,defaults + activate-environment: ${{ env.ENV_NAME }} + auto-update-conda: false + use-only-tar-bz2: true + + - name: "conda environment cache" + uses: ./.github/workflows/composite/conda-env-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + install_packages: "cartopy nox pip" + + - name: "conda info" + run: | + conda info + conda list + + - name: "cartopy cache" + uses: ./.github/workflows/composite/cartopy-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + + - name: "nox cache" + uses: ./.github/workflows/composite/nox-cache + with: + cache_build: 0 + env_name: ${{ env.ENV_NAME }} + lock_file: ${{ env.LOCK_FILE }} + + # TODO: drop use of site.cfg and explicit use of mplrc + - name: "iris configure" + env: + SITE_CFG: lib/iris/etc/site.cfg + MPL_RC: ${HOME}/.config/matplotlib/matplotlibrc + run: | + mkdir -p $(dirname ${SITE_CFG}) + echo ${SITE_CFG} + echo "[Resources]" >> ${SITE_CFG} + echo "test_data_dir = 
${HOME}/iris-test-data/test_data" >> ${SITE_CFG} + echo "doc_dir = ${GITHUB_WORKSPACE}/docs" >> ${SITE_CFG} + cat ${SITE_CFG} + mkdir -p $(dirname ${MPL_RC}) + echo ${MPL_RC} + echo "backend : agg" >> ${MPL_RC} + echo "image.cmap : viridis" >> ${MPL_RC} + cat ${MPL_RC} + + - name: "iris ${{ matrix.session }}" + env: + PY_VER: ${{ matrix.python-version }} + run: | + nox --session ${{ matrix.session }} -- --verbose diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml new file mode 100644 index 0000000000..265489883f --- /dev/null +++ b/.github/workflows/ci-wheels.yml @@ -0,0 +1,166 @@ +# Reference: +# - https://github.com/actions/checkout +# - https://github.com/actions/download-artifact +# - https://github.com/actions/upload-artifact +# - https://github.com/pypa/build +# - https://github.com/pypa/gh-action-pypi-publish +# - https://test.pypi.org/help/#apitoken + +name: ci-wheels + +on: + pull_request: + + push: + tags: + - "v*" + branches-ignore: + - "auto-update-lockfiles" + - "pre-commit-ci-update-config" + - "dependabot/*" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + name: "build sdist & wheel" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: "building" + shell: bash + run: | + # require build with explicit --sdist and --wheel in order to + # get correct version associated with sdist and bdist artifacts + pipx run build --sdist --wheel + + - uses: actions/upload-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist/* + + test-wheel: + needs: build + name: "test wheel (py${{ matrix.python-version }})" + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10"] + session: ["wheel"] + env: + ENV_NAME: "ci-wheels" + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - name: "environment configure" + env: + # Maximum cache period (in weeks) before forcing a cache refresh. 
+ CACHE_WEEKS: 2 + run: | + echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV} + echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} + + - name: "conda package cache" + uses: ./.github/workflows/composite/conda-pkg-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + + - name: "conda install" + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-version: latest + channels: conda-forge,defaults + activate-environment: ${{ env.ENV_NAME }} + auto-update-conda: false + use-only-tar-bz2: true + + - name: "conda environment cache" + uses: ./.github/workflows/composite/conda-env-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + install_packages: "nox pip" + + - name: "nox cache" + uses: ./.github/workflows/composite/nox-cache + with: + cache_build: 0 + env_name: ${{ env.ENV_NAME }} + lock_file: ${{ env.LOCK_FILE }} + + - name: "nox install and test wheel" + env: + PY_VER: ${{ matrix.python-version }} + run: | + nox --session ${{ matrix.session }} -- --verbose + + show-artifacts: + needs: build + name: "show artifacts" + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - shell: bash + run: | + ls -l ${{ github.workspace }}/dist + + publish-artifacts-test-pypi: + needs: test-wheel + name: "publish to test.pypi" + runs-on: ubuntu-latest + # upload to Test PyPI for every commit on main branch + if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' + steps: + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + skip_existing: true + print_hash: true + + publish-artifacts-pypi: + needs: test-wheel + name: "publish to pypi" + runs-on: ubuntu-latest + # upload to PyPI for every tag starting with 'v' + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + steps: + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + print_hash: true \ No newline at end of file diff --git a/.github/workflows/composite/cartopy-cache/action.yml b/.github/workflows/composite/cartopy-cache/action.yml new file mode 100644 index 0000000000..e805cbacc1 --- /dev/null +++ b/.github/workflows/composite/cartopy-cache/action.yml @@ -0,0 +1,41 @@ +name: "cartopy-cache" +description: "create and cache cartopy assets" + +# +# Assumes the environment contains the following variables: +# - CONDA +# +inputs: + cache_build: + description: "conda environment cache build number" + required: false + default: "0" + cache_period: + description: "conda environment cache timestamp" + required: true + env_name: + description: "environment name" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + id: cartopy-cache + with: + path: ~/.local/share/cartopy + key: ${{ runner.os }}-cartopy-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }} + + - if: steps.cartopy-cache.outputs.cache-hit != 'true' + env: + CARTOPY_SHARE_DIR: ~/.local/share/cartopy + 
CARTOPY_FEATURE: https://raw.githubusercontent.com/SciTools/cartopy/v0.20.0/tools/cartopy_feature_download.py + shell: bash + run: | + # We must explicitly activate the environment within the composite action. + source ${{ env.CONDA }}/etc/profile.d/conda.sh >/dev/null 2>&1 + conda activate ${{ inputs.env_name }} + wget --quiet ${CARTOPY_FEATURE} + mkdir -p ${CARTOPY_SHARE_DIR} + # Requires a pre-installed version of cartopy within the environment. + python cartopy_feature_download.py physical --output ${CARTOPY_SHARE_DIR} --no-warn diff --git a/.github/workflows/composite/conda-env-cache/action.yml b/.github/workflows/composite/conda-env-cache/action.yml new file mode 100644 index 0000000000..6bfd6fff90 --- /dev/null +++ b/.github/workflows/composite/conda-env-cache/action.yml @@ -0,0 +1,35 @@ +name: "conda-env-cache" +description: "create and cache the conda environment" + +# +# Assumes the environment contains the following variables: +# - CONDA +# +inputs: + cache_build: + description: "conda environment cache build number" + required: false + default: "0" + cache_period: + description: "conda environment cache timestamp" + required: true + env_name: + description: "environment name" + required: true + install_packages: + description: "conda packages to install into environment" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + id: conda-env-cache + with: + path: ${{ env.CONDA }}/envs/${{ inputs.env_name }} + key: ${{ runner.os }}-conda-env-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }} + + - if: steps.conda-env-cache.outputs.cache-hit != 'true' + shell: bash + run: | + conda install --quiet --name ${{ inputs.env_name }} ${{ inputs.install_packages }} diff --git a/.github/workflows/composite/conda-pkg-cache/action.yml b/.github/workflows/composite/conda-pkg-cache/action.yml new file mode 100644 index 0000000000..4472d7e415 --- /dev/null +++ b/.github/workflows/composite/conda-pkg-cache/action.yml @@ -0,0 +1,22 @@ +name: "conda-pkg-cache" +description: "cache the conda environment packages" + +inputs: + cache_build: + description: "conda environment cache build number" + required: false + default: "0" + cache_period: + description: "conda environment cache timestamp" + required: true + env_name: + description: "environment name" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-pkgs-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }} diff --git a/.github/workflows/composite/iris-data-cache/action.yml b/.github/workflows/composite/iris-data-cache/action.yml new file mode 100644 index 0000000000..7bf72fae8b --- /dev/null +++ b/.github/workflows/composite/iris-data-cache/action.yml @@ -0,0 +1,30 @@ +name: "iris-data-cache" +description: "create and cache the iris test data" + +inputs: + cache_build: + description: "data cache build number" + required: false + default: "0" + env_name: + description: "environment name" + required: true + version: + description: "iris test data version" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + id: data-cache + with: + path: ~/iris-test-data + key: ${{ runner.os }}-iris-test-data-${{ inputs.env_name }}-v${{ inputs.version }}-b${{ inputs.cache_build }} + + - if: steps.data-cache.outputs.cache-hit != 'true' + shell: bash + run: | + wget --quiet https://github.com/SciTools/iris-test-data/archive/v${{ inputs.version }}.zip -O
iris-test-data.zip + unzip -q iris-test-data.zip + mv iris-test-data-${{ inputs.version }} ~/iris-test-data diff --git a/.github/workflows/composite/nox-cache/action.yml b/.github/workflows/composite/nox-cache/action.yml new file mode 100644 index 0000000000..468dd22d81 --- /dev/null +++ b/.github/workflows/composite/nox-cache/action.yml @@ -0,0 +1,22 @@ +name: "nox cache" +description: "cache the nox test environments" + +inputs: + cache_build: + description: "nox cache build number" + required: false + default: "0" + env_name: + description: "environment name" + required: true + lock_file: + description: "conda-lock environment requirements filename" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.nox + key: ${{ runner.os }}-nox-${{ inputs.env_name }}-s${{ matrix.session }}-py${{ matrix.python-version }}-b${{ inputs.cache_build }}-${{ hashFiles(inputs.lock_file) }} diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml old mode 100755 new mode 100644 index 3106d94a67..4e717be8a1 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -2,7 +2,7 @@ # available packages and dependencies. # # Environment specifications are given as conda environment.yml files found in -# `requirements/ci/py**.yml`. These state the pacakges required, the conda channels +# `requirements/ci/py**.yml`. These state the packages required, the conda channels # that the packages will be pulled from, and any versions of packages that need to be # pinned at specific versions. # @@ -14,107 +14,102 @@ name: Refresh Lockfiles on: workflow_dispatch: - inputs: - clobber: - description: | - Force the workflow to run, potentially clobbering any commits already made to the branch. - Enter "yes" or "true" to run. - default: "no" schedule: # Run once a week on a Saturday night - - cron: 1 0 * * 6 + # N.B. "should" be quoted, according to + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule + - cron: "1 0 * * 6" jobs: - - no_clobber: + + get_python_matrix: + # Determines which Python versions should be included in the matrix used in + # the gen_lockfiles job. if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.get_py.outputs.matrix }} steps: - # check if the auto-update-lockfiles branch exists. If it does, and someone other than - # the lockfile bot has made the head commit, abort the workflow. - # This job can be manually overridden by running directly from the github actions panel - # (known as a "workflow_dispatch") and setting the `clobber` input to "yes". - - uses: actions/script@v4 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - if (context.eventName == "workflow_dispatch") { - const clobber = context.payload.inputs.clobber || "no"; - if (["yes", "true", "y"].includes(clobber.trim().toLowerCase())) { - core.info("Manual override, continuing workflow, potentially overwriting previous commits to auto-update-lockfiles"); - return - } - } - github.repos.getBranch({...context.repo, branch: "auto-update-lockfiles"}).then(res => { - const committer = res.data.commit.commit.committer; - if (committer && committer.name === "Lockfile bot") { - core.info("Lockfile bot was the last to push to auto-update-lockfiles. Continue."); - } else { - core.setFailed("New commits to auto-update-lockfiles since bot last ran. 
Abort!"); - } - }).catch(err => { - if (err.status === 404) { - core.info("auto-update-lockfiles branch not found, continue"); - } - }) + - uses: actions/checkout@v3 + - id: get_py + run: echo "::set-output name=matrix::$(ls -1 requirements/ci/py*.yml | xargs -n1 basename | sed 's/....$//' | jq -cnR '[inputs]')" gen_lockfiles: # this is a matrix job: it splits to create new lockfiles for each # of the CI test python versions. - # this list below should be changed when covering more python versions - # TODO: generate this matrix automatically from the list of available py**.yml files - # ref: https://tomasvotruba.com/blog/2020/11/16/how-to-make-dynamic-matrix-in-github-actions/ if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest - needs: no_clobber - + needs: get_python_matrix + strategy: matrix: - python: ['38'] - + python: ${{ fromJSON(needs.get_python_matrix.outputs.matrix) }} + steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: install conda-lock run: | source $CONDA/bin/activate base conda install -y -c conda-forge conda-lock - name: generate lockfile run: | - $CONDA/bin/conda-lock lock -p linux-64 -f requirements/ci/py${{matrix.python}}.yml - mv conda-linux-64.lock py${{matrix.python}}-linux-64.lock + $CONDA/bin/conda-lock lock -k explicit -p linux-64 -f requirements/ci/${{matrix.python}}.yml + mv conda-linux-64.lock ${{matrix.python}}-linux-64.lock - name: output lockfile - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: - path: py${{matrix.python}}-linux-64.lock - + path: ${{matrix.python}}-linux-64.lock + create_pr: # once the matrix job has completed all the lock files will have been uploaded as artifacts. # Download the artifacts, add them to the repo, and create a PR. if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest needs: gen_lockfiles - + steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: get artifacts - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: path: artifacts - + - name: Update lock files in repo run: | cp artifacts/artifact/*.lock requirements/ci/nox.lock rm -r artifacts - + + - name: "Generate token" + uses: tibdex/github-app-token@v1 + id: generate-token + with: + app_id: ${{ secrets.AUTH_APP_ID }} + private_key: ${{ secrets.AUTH_APP_PRIVATE_KEY }} + - name: Create Pull Request - uses: peter-evans/create-pull-request@052fc72b4198ba9fbc81b818c6e1859f747d49a8 + id: cpr + uses: peter-evans/create-pull-request@18f90432bedd2afd6a825469ffd38aa24712a91d with: + token: ${{ steps.generate-token.outputs.token }} commit-message: Updated environment lockfiles committer: "Lockfile bot " author: "Lockfile bot " delete-branch: true branch: auto-update-lockfiles - title: Update CI environment lockfiles + title: "[iris.ci] environment lockfiles auto-update" body: | Lockfiles updated to the latest resolvable environment. + + ### If the CI tasks fail, create a new branch based on this PR and add the required fixes to that branch. 
+ labels: | + New: Pull Request + Bot + + - name: Check Pull Request + if: steps.cpr.outputs.pull-request-number != '' + run: | + echo "pull-request #${{ steps.cpr.outputs.pull-request-number }}" + echo "pull-request URL ${{ steps.cpr.outputs.pull-request-url }}" + echo "pull-request operation [${{ steps.cpr.outputs.pull-request-operation }}]" + echo "pull-request head SHA ${{ steps.cpr.outputs.pull-request-head-sha }}" diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index a38a03637e..008fe56deb 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,16 +1,20 @@ # See https://github.com/actions/stale name: Stale issues and pull-requests + on: schedule: - - cron: 0 0 * * * + # Run once a day + # N.B. "should" be quoted, according to + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule + - cron: "0 0 * * *" jobs: stale: if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest steps: - - uses: actions/stale@v4.0.0 + - uses: actions/stale@v5 with: repo-token: ${{ secrets.GITHUB_TOKEN }} @@ -59,11 +63,11 @@ jobs: stale-pr-label: Stale # Labels on issues exempted from stale. - exempt-issue-labels: | + exempt-issue-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Labels on prs exempted from stale. - exempt-pr-labels: | + exempt-pr-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Max number of operations per run. diff --git a/.gitignore b/.gitignore index b9fa92139d..512fbab231 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,8 @@ *.py[co] +# setuptools-scm +_version.py + # Environment file which should be autogenerated *conda_requirements.txt* @@ -26,8 +29,9 @@ pip-cache .tox .pytest_cache -# asv environments, results +# asv data, environments, results .asv +benchmarks/.data #Translations *.mo @@ -54,6 +58,9 @@ lib/iris/tests/results/imagerepo.lock /.idea *.cover +# vscode files +.vscode + # Auto generated documentation files docs/src/_build/* docs/src/generated diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 97dff666cf..22746cb0ee 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ minimum_pre_commit_version: 1.21.0 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v4.3.0 hooks: # Prevent giant files from being committed. - id: check-added-large-files @@ -29,14 +29,14 @@ repos: - id: no-commit-to-branch - repo: https://github.com/psf/black - rev: 21.12b0 + rev: 22.8.0 hooks: - id: black pass_filenames: false args: [--config=./pyproject.toml, .] - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 5.0.4 hooks: - id: flake8 types: [file, python] @@ -50,14 +50,14 @@ repos: args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: v1.12.0 + rev: v1.12.1 hooks: - id: blacken-docs types: [file, rst] additional_dependencies: [black==21.6b0] - repo: https://github.com/aio-libs/sort-all - rev: v1.1.0 + rev: v1.2.0 hooks: - id: sort-all types: [file, python] diff --git a/.readthedocs.yml b/.readthedocs.yml index 63c4798050..95f828a873 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,6 +4,19 @@ build: os: ubuntu-20.04 tools: python: mambaforge-4.10 + jobs: + post_checkout: + # The SciTools/iris repository is shallow i.e., has a .git/shallow, + # therefore complete the repository with a full history in order + # to allow setuptools-scm to correctly auto-discover the version. 
+ - git fetch --unshallow + - git fetch --all + # Need to stash the local changes that Read the Docs makes so that + # setuptools_scm can generate the correct Iris version. + pre_install: + - git stash + post_install: + - git stash pop conda: environment: requirements/ci/readthedocs.yml diff --git a/MANIFEST.in b/MANIFEST.in index 52492b17b2..ad28df9c7c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,15 +1,13 @@ # Top-level files include CHANGES COPYING COPYING.LESSER +prune .github +exclude .gitignore -# Files from setup.py package_data that are not automatically added to source distributions -recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json -recursive-include lib/iris/etc * -include lib/iris/tests/stock/file_headers/* - +# Files required for conda package management recursive-include requirements * -# File required to build docs -recursive-include docs Makefile *.js *.png *.py *.rst +# Files required to build docs +recursive-include docs * prune docs/src/_build prune docs/src/generated prune docs/gallery_tests @@ -18,6 +16,5 @@ prune docs/gallery_tests include tools/generate_std_names.py include etc/cf-standard-name-table.xml -global-exclude *.pyc -global-exclude __pycache__ -global-exclude iris_image_test_output +global-exclude *.py[cod] +global-exclude __pycache__ \ No newline at end of file diff --git a/README.md b/README.md index 1b2a7b496f..ac2781f469 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@

[README logo/badge HTML was stripped during extraction and cannot be faithfully reconstructed. First hunk: the header's Iris logo image/link line is updated (alt text "Iris" on both the removed and added lines).]
@@ -10,15 +10,18 @@
[Second hunk, badge block: the Cirrus-CI badge is removed and replaced by a badge for the new ci-tests workflow, with further badge lines added; the Documentation Status, pre-commit.ci status and conda-forge badges are unchanged.]
diff --git a/benchmarks/README.md b/benchmarks/README.md new file mode 100644 index 0000000000..8dffd473f3 --- /dev/null +++ b/benchmarks/README.md @@ -0,0 +1,99 @@ +# Iris Performance Benchmarking + +Iris uses an [Airspeed Velocity](https://github.com/airspeed-velocity/asv) +(ASV) setup to benchmark performance. This is primarily designed to check for +performance shifts between commits using statistical analysis, but can also +be easily repurposed for manual comparative and scalability analyses. + +The benchmarks are automatically run overnight +[by a GitHub Action](../.github/workflows/benchmark.yml), with any notable +shifts in performance being flagged in a new GitHub issue. + +## Running benchmarks + +`asv ...` commands must be run from this directory. You will need to have ASV +installed, as well as Nox (see +[Benchmark environments](#benchmark-environments)). + +[Iris' noxfile](../noxfile.py) includes a `benchmarks` session that provides +conveniences for setting up before benchmarking, and can also replicate the +automated overnight run locally. See the session docstring for detail. + +### Environment variables + +* `OVERRIDE_TEST_DATA_REPOSITORY` - required - some benchmarks use +`iris-test-data` content, and your local `site.cfg` is not available for +benchmark scripts. +* `DATA_GEN_PYTHON` - required - path to a Python executable that can be +used to generate benchmark test objects/files; see +[Data generation](#data-generation). The Nox session sets this automatically, +but will defer to any value already set in the shell. +* `BENCHMARK_DATA` - optional - path to a directory for benchmark synthetic +test data, which the benchmark scripts will create if it doesn't already +exist. Defaults to `<root>/benchmarks/.data/` if not set. Note that some of +the generated files, especially in the 'SPerf' suite, are many GB in size so +plan accordingly. +* `ON_DEMAND_BENCHMARKS` - optional - when set (to any value): benchmarks +decorated with `@on_demand_benchmark` are included in the ASV run. Usually +coupled with the ASV `--bench` argument to only run the benchmark(s) of +interest. It is set during the Nox `cperf` and `sperf` sessions. + +## Writing benchmarks + +[See the ASV docs](https://asv.readthedocs.io/) for full detail. + +### Data generation +**Important:** be sure not to use the benchmarking environment to generate any +test objects/files, as this environment changes with each commit being +benchmarked, creating inconsistent benchmark 'conditions'. The +[generate_data](./benchmarks/generate_data/__init__.py) module offers a +solution; read more detail there. + +### ASV re-run behaviour + +Note that ASV re-runs a benchmark multiple times between calls to its `setup()` routine. This is a problem for benchmarking certain Iris operations such as data +realisation, since the data will no longer be lazy after the first run. +Consider writing extra steps to restore objects' original state _within_ the +benchmark itself. + +If adding steps to the benchmark will skew the result too much then re-running +can be disabled by setting an attribute on the benchmark: `number = 1`. To +maintain result accuracy this should be accompanied by increasing the number of +repeats _between_ `setup()` calls using the `repeat` attribute. +`warmup_time = 0` is also advisable since ASV performs independent re-runs to +estimate run-time, and these will still be subject to the original problem.
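The `number` / `repeat` / `warmup_time` attributes described above are ASV's standard timing-benchmark controls. As an illustrative sketch only (a hypothetical benchmark, not one of the files added in this diff), a data-realisation benchmark might combine them like this:

```python
import numpy as np


class RealiseData:
    # One run per setup() call - after the first run the object is no
    # longer "lazy", so further runs would not measure realisation.
    number = 1
    # Compensate for the reduced certainty with more repeats:
    # (min repeats, max repeats, max total seconds).
    repeat = (5, 30, 20.0)
    # Warmup runs ignore `number`, so they would also de-lazify the object.
    warmup_time = 0.0

    def setup(self):
        # Stand-in for creating a lazy Iris cube.
        self.data = np.arange(1_000_000, dtype=np.float64)

    def time_realise(self):
        self.data.sum()
```

This is the same recipe that the `disable_repeat_between_setup` decorator, added in `benchmarks/benchmarks/__init__.py` later in this diff, applies automatically.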
+ +### Scaling / non-Scaling Performance Differences + +When comparing performance between commits/file-type/whatever it can be helpful +to know if the differences exist in scaling or non-scaling parts of the Iris +functionality in question. This can be done using a size parameter, setting +one value to be as small as possible (e.g. a scalar `Cube`), and the other to +be significantly larger (e.g. a 1000x1000 `Cube`). Performance differences +might only be seen for the larger value, or the smaller, or both, getting you +closer to the root cause. + +### On-demand benchmarks + +Some benchmarks provide useful insight but are inappropriate to be included in +a benchmark run by default, e.g. those with long run-times or requiring a local +file. These benchmarks should be decorated with `@on_demand_benchmark` +(see [benchmarks init](./benchmarks/__init__.py)), which +sets the benchmark to only be included in a run when the `ON_DEMAND_BENCHMARKS` +environment variable is set. Examples include the CPerf and SPerf benchmark +suites for the UK Met Office NG-VAT project. + +## Benchmark environments + +We have disabled ASV's standard environment management, instead using an +environment built using the same Nox scripts as Iris' test environments. This +is done using ASV's plugin architecture - see +[asv_delegated_conda.py](asv_delegated_conda.py) and the extra config items in +[asv.conf.json](asv.conf.json). + +(ASV is written to control the environment(s) that benchmarks are run in - +minimising external factors and also allowing it to compare between a matrix +of dependencies (each in a separate environment). We have chosen to sacrifice +these features in favour of testing each commit with its intended dependencies, +controlled by Nox + lock-files). diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index 9ea1cdb101..7337eaa8c7 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -3,18 +3,26 @@ "project": "scitools-iris", "project_url": "https://github.com/SciTools/iris", "repo": "..", - "environment_type": "nox-conda", + "environment_type": "conda-delegated", "show_commit_url": "http://github.com/scitools/iris/commit/", + "branches": ["upstream/main"], "benchmark_dir": "./benchmarks", "env_dir": ".asv/env", "results_dir": ".asv/results", "html_dir": ".asv/html", - "plugins": [".nox_asv_plugin"], - // The commit to checkout to first run Nox to set up the environment. - "nox_setup_commit": "HEAD", - // The path of the noxfile's location relative to the project root. - "noxfile_rel_path": "noxfile.py", - // The ``--session`` arg to be used with ``--install-only`` to prep an environment. - "nox_session_name": "tests" + "plugins": [".asv_delegated_conda"], + + // The command(s) that create/update an environment correctly for the + // checked-out commit. + // Interpreted the same as build_command, with following exceptions: + // * No build-time environment variables. + // * Is run in the same environment as the ASV install itself. + "delegated_env_commands": [ + "sed -i 's/_PY_VERSIONS_ALL/_PY_VERSION_LATEST/g' noxfile.py", + "nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" + ], + // The parent directory of the above environment. + // The most recently modified environment in the directory will be used. 
+ "delegated_env_parent": "{conf_dir}/.asv/env/nox01" } diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py new file mode 100644 index 0000000000..250a4e032d --- /dev/null +++ b/benchmarks/asv_delegated_conda.py @@ -0,0 +1,208 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` +subclass that manages the Conda environment via custom user scripts. + +""" + +from os import environ +from os.path import getmtime +from pathlib import Path +from shutil import copy2, copytree, rmtree +from tempfile import TemporaryDirectory + +from asv import util as asv_util +from asv.config import Config +from asv.console import log +from asv.plugins.conda import Conda +from asv.repo import Repo + + +class CondaDelegated(Conda): + """ + Manage a Conda environment using custom user scripts, run at each commit. + + Ignores user input variations - ``matrix`` / ``pythons`` / + ``conda_environment_file``, since environment is being managed outside ASV. + + Original environment creation behaviour is inherited, but upon checking out + a commit the custom script(s) are run and the original environment is + replaced with a symlink to the custom environment. This arrangement is then + re-used in subsequent runs. + + """ + + tool_name = "conda-delegated" + + def __init__( + self, + conf: Config, + python: str, + requirements: dict, + tagged_env_vars: dict, + ) -> None: + """ + Parameters + ---------- + conf : Config instance + + python : str + Version of Python. Must be of the form "MAJOR.MINOR". + + requirements : dict + Dictionary mapping a PyPI package name to a version + identifier string. + + tagged_env_vars : dict + Environment variables, tagged for build vs. non-build + + """ + ignored = ["`python`"] + if requirements: + ignored.append("`requirements`") + if tagged_env_vars: + ignored.append("`tagged_env_vars`") + if conf.conda_environment_file: + ignored.append("`conda_environment_file`") + message = ( + f"Ignoring ASV setting(s): {', '.join(ignored)}. Benchmark " + "environment management is delegated to third party script(s)." + ) + log.warning(message) + requirements = {} + tagged_env_vars = {} + conf.conda_environment_file = None + + super().__init__(conf, python, requirements, tagged_env_vars) + self._update_info() + + self._env_commands = self._interpolate_commands( + conf.delegated_env_commands + ) + # Again using _interpolate_commands to get env parent path - allows use + # of the same ASV env variables. + env_parent_interpolated = self._interpolate_commands( + conf.delegated_env_parent + ) + # Returns list of tuples, we just want the first. + env_parent_first = env_parent_interpolated[0] + # The 'command' is the first item in the returned tuple. + env_parent_string = " ".join(env_parent_first[0]) + self._delegated_env_parent = Path(env_parent_string).resolve() + + @property + def name(self): + """Get a name to uniquely identify this environment.""" + return asv_util.sanitize_filename(self.tool_name) + + def _update_info(self) -> None: + """Make sure class properties reflect the actual environment being used.""" + # Follow symlink if it has been created. + actual_path = Path(self._path).resolve() + self._path = str(actual_path) + + # Get custom environment's Python version if it exists yet. 
+ try: + get_version = ( + "from sys import version_info; " + "print(f'{version_info.major}.{version_info.minor}')" + ) + actual_python = self.run(["-c", get_version]) + self._python = actual_python + except OSError: + pass + + def _prep_env(self) -> None: + """Run the custom environment script(s) and switch to using that environment.""" + message = f"Running delegated environment management for: {self.name}" + log.info(message) + env_path = Path(self._path) + + def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: + """For copying between self._path and a temporary cache.""" + asv_files = list(src_parent.glob("asv*")) + # build_root_path.name usually == "project" . + asv_files += [src_parent / Path(self._build_root).name] + for src_path in asv_files: + dst_path = dst_parent / src_path.name + if not dst_path.exists(): + # Only caching in case the environment has been rebuilt. + # If the dst_path already exists: rebuilding hasn't + # happened. Also a non-issue when copying in the reverse + # direction because the cache dir is temporary. + if src_path.is_dir(): + func = copytree + else: + func = copy2 + func(src_path, dst_path) + + with TemporaryDirectory(prefix="delegated_asv_cache_") as asv_cache: + asv_cache_path = Path(asv_cache) + # Cache all of ASV's files as delegated command may remove and + # re-build the environment. + copy_asv_files(env_path.resolve(), asv_cache_path) + + # Adapt the build_dir to the cache location. + build_root_path = Path(self._build_root) + build_dir_original = build_root_path / self._repo_subdir + build_dir_subpath = build_dir_original.relative_to( + build_root_path.parent + ) + build_dir = asv_cache_path / build_dir_subpath + + # Run the script(s) for delegated environment creation/updating. + # (An adaptation of self._interpolate_and_run_commands). + for command, env, return_codes, cwd in self._env_commands: + local_envs = dict(environ) + local_envs.update(env) + if cwd is None: + cwd = str(build_dir) + _ = asv_util.check_output( + command, + timeout=self._install_timeout, + cwd=cwd, + env=local_envs, + valid_return_codes=return_codes, + ) + + # Replace the env that ASV created with a symlink to the env + # created/updated by the custom script. + delegated_env_path = sorted( + self._delegated_env_parent.glob("*"), + key=getmtime, + reverse=True, + )[0] + if env_path.resolve() != delegated_env_path: + try: + env_path.unlink(missing_ok=True) + except IsADirectoryError: + rmtree(env_path) + env_path.symlink_to( + delegated_env_path, target_is_directory=True + ) + + # Check that environment exists. + try: + env_path.resolve(strict=True) + except FileNotFoundError: + message = f"Path does not resolve to environment: {env_path}" + log.error(message) + raise RuntimeError(message) + + # Restore ASV's files from the cache (if necessary). + copy_asv_files(asv_cache_path, env_path.resolve()) + + # Record new environment information in properties. + self._update_info() + + def checkout_project(self, repo: Repo, commit_hash: str) -> None: + """Check out the working tree of the project at given commit hash.""" + super().checkout_project(repo, commit_hash) + self._prep_env() + log.info( + f"Environment {self.name} updated to spec at {commit_hash[:8]}" + ) diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 2e741c3da0..c86682ca4a 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -4,46 +4,121 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
"""Common code for benchmarks.""" +from os import environ +import resource -import os -from pathlib import Path +ARTIFICIAL_DIM_SIZE = int(10e3) # For all artificial cubes, coords etc. -# Environment variable names -_ASVDIR_VARNAME = "ASV_DIR" # As set in nightly script "asv_nightly/asv.sh" -_DATADIR_VARNAME = "BENCHMARK_DATA" # For local runs -ARTIFICIAL_DIM_SIZE = int(10e3) # For all artificial cubes, coords etc. +def disable_repeat_between_setup(benchmark_object): + """ + Decorator for benchmarks where object persistence would be inappropriate. + + E.g: + * Benchmarking data realisation + * Benchmarking Cube coord addition + + Can be applied to benchmark classes/methods/functions. + + https://asv.readthedocs.io/en/stable/benchmarks.html#timing-benchmarks + + """ + # Prevent repeat runs between setup() runs - object(s) will persist after 1st. + benchmark_object.number = 1 + # Compensate for reduced certainty by increasing number of repeats. + # (setup() is run between each repeat). + # Minimum 5 repeats, run up to 30 repeats / 20 secs whichever comes first. + benchmark_object.repeat = (5, 30, 20.0) + # ASV uses warmup to estimate benchmark time before planning the real run. + # Prevent this, since object(s) will persist after first warmup run, + # which would give ASV misleading info (warmups ignore ``number``). + benchmark_object.warmup_time = 0.0 + + return benchmark_object + + +class TrackAddedMemoryAllocation: + """ + Context manager which measures by how much process resident memory grew, + during execution of its enclosed code block. + + Obviously limited as to what it actually measures : Relies on the current + process not having significant unused (de-allocated) memory when the + tested codeblock runs, and only reliable when the code allocates a + significant amount of new memory. + + Example: + with TrackAddedMemoryAllocation() as mb: + initial_call() + other_call() + result = mb.addedmem_mb() + + Attributes + ---------- + RESULT_MINIMUM_MB : float + The smallest result that should ever be returned, in Mb. Results + fluctuate from run to run (usually within 1Mb) so if a result is + sufficiently small this noise will produce a before-after ratio over + AVD's detection threshold and be treated as 'signal'. Results + smaller than this value will therefore be returned as equal to this + value, ensuring fractionally small noise / no noise at all. + + """ + + RESULT_MINIMUM_MB = 5.0 + + @staticmethod + def process_resident_memory_mb(): + return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024.0 + + def __enter__(self): + self.mb_before = self.process_resident_memory_mb() + return self + + def __exit__(self, *_): + self.mb_after = self.process_resident_memory_mb() + + def addedmem_mb(self): + """Return measured memory growth, in Mb.""" + result = self.mb_after - self.mb_before + # Small results are too vulnerable to noise being interpreted as signal. + result = max(self.RESULT_MINIMUM_MB, result) + return result + + @staticmethod + def decorator(decorated_func): + """ + Decorates this benchmark to track growth in resident memory during execution. + + Intended for use on ASV ``track_`` benchmarks. Applies the + :class:`TrackAddedMemoryAllocation` context manager to the benchmark + code, sets the benchmark ``unit`` attribute to ``Mb``. + + """ + + def _wrapper(*args, **kwargs): + assert decorated_func.__name__[:6] == "track_" + # Run the decorated benchmark within the added memory context + # manager. 
+            with TrackAddedMemoryAllocation() as mb:
+                decorated_func(*args, **kwargs)
+            return mb.addedmem_mb()
+
+        decorated_func.unit = "Mb"
+        return _wrapper
+
+
+def on_demand_benchmark(benchmark_object):
+    """
+    Decorator. Disables these benchmark(s) unless the ON_DEMAND_BENCHMARKS env var is set.

-# Work out where the benchmark data dir is.
-asv_dir = os.environ.get("ASV_DIR", None)
-if asv_dir:
-    # For an overnight run, this comes from the 'ASV_DIR' setting.
-    benchmark_data_dir = Path(asv_dir) / "data"
-else:
-    # For a local run, you set 'BENCHMARK_DATA'.
-    benchmark_data_dir = os.environ.get(_DATADIR_VARNAME, None)
-    if benchmark_data_dir is not None:
-        benchmark_data_dir = Path(benchmark_data_dir)
+    For benchmarks that, for whatever reason, should not be run by default.
+    E.g:
+    * Require a local file
+    * Used for scalability analysis instead of commit monitoring.

+    Can be applied to benchmark classes/methods/functions.

-def testdata_path(*path_names):
+    """
-    Return the path of a benchmark test data file.
-
-    These are based from a test-data location dir, which is either
-    ${}/data (for overnight tests), or ${} for local testing.
-
-    If neither of these were set, an error is raised.
-
-    """.format(
-        _ASVDIR_VARNAME, _DATADIR_VARNAME
-    )
-    if benchmark_data_dir is None:
-        msg = (
-            "Benchmark data dir is not defined : "
-            'Either "${}" or "${}" must be set.'
-        )
-        raise (ValueError(msg.format(_ASVDIR_VARNAME, _DATADIR_VARNAME)))
-    path = benchmark_data_dir.joinpath(*path_names)
-    path = str(path)  # Because Iris doesn't understand Path objects yet.
-    return path
+    if "ON_DEMAND_BENCHMARKS" in environ:
+        return benchmark_object
diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py
index 270119da71..4cc4f6c70a 100644
--- a/benchmarks/benchmarks/aux_factory.py
+++ b/benchmarks/benchmarks/aux_factory.py
@@ -10,9 +10,10 @@
 import numpy as np

-from benchmarks import ARTIFICIAL_DIM_SIZE
 from iris import aux_factory, coords

+from . import ARTIFICIAL_DIM_SIZE
+

 class FactoryCommon:
     # TODO: once https://github.com/airspeed-velocity/asv/pull/828 is released:
@@ -43,10 +44,6 @@ def time_create(self):
         specified in the subclass."""
         self.create()

-    def time_return(self):
-        """Return an instance of the benchmarked factory."""
-        self.factory
-

 class HybridHeightFactory(FactoryCommon):
     def setup(self):
diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py
index fce7318d49..3107dcf077 100644
--- a/benchmarks/benchmarks/coords.py
+++ b/benchmarks/benchmarks/coords.py
@@ -10,9 +10,10 @@
 import numpy as np

-from benchmarks import ARTIFICIAL_DIM_SIZE
 from iris import coords

+from . 
import ARTIFICIAL_DIM_SIZE, disable_repeat_between_setup + def setup(): """General variables needed by multiple benchmark classes.""" @@ -50,10 +51,6 @@ def time_create(self): specified in the subclass.""" self.create() - def time_return(self): - """Return an instance of the benchmarked coord.""" - self.component - class DimCoord(CoordCommon): def setup(self): @@ -92,6 +89,23 @@ def setup(self): def create(self): return coords.AuxCoord(**self.create_kwargs) + def time_points(self): + _ = self.component.points + + def time_bounds(self): + _ = self.component.bounds + + +@disable_repeat_between_setup +class AuxCoordLazy(AuxCoord): + """Lazy equivalent of :class:`AuxCoord`.""" + + def setup(self): + super().setup() + self.create_kwargs["points"] = self.component.lazy_points() + self.create_kwargs["bounds"] = self.component.lazy_bounds() + self.setup_common() + class CellMeasure(CoordCommon): def setup(self): diff --git a/benchmarks/benchmarks/cperf/__init__.py b/benchmarks/benchmarks/cperf/__init__.py new file mode 100644 index 0000000000..fb311c44dc --- /dev/null +++ b/benchmarks/benchmarks/cperf/__init__.py @@ -0,0 +1,97 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. + +CPerf = comparing performance working with data in UM versus LFRic formats. + +Files available from the UK Met Office: + moo ls moose:/adhoc/projects/avd/asv/data_for_nightly_tests/ +""" +import numpy as np + +from iris import load_cube + +# TODO: remove uses of PARSE_UGRID_ON_LOAD once UGRID parsing is core behaviour. +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + +from ..generate_data import BENCHMARK_DATA +from ..generate_data.ugrid import make_cubesphere_testfile + +# The data of the core test UM files has dtype=np.float32 shape=(1920, 2560) +_UM_DIMS_YX = (1920, 2560) +# The closest cubesphere size in terms of datapoints is sqrt(1920*2560 / 6) +# This gives ~= 905, i.e. "C905" +_N_CUBESPHERE_UM_EQUIVALENT = int(np.sqrt(np.prod(_UM_DIMS_YX) / 6)) + + +class SingleDiagnosticMixin: + """For use in any benchmark classes that work on a single diagnostic file.""" + + params = [ + ["LFRic", "UM", "UM_lbpack0", "UM_netcdf"], + [False, True], + [False, True], + ] + param_names = ["file type", "height dim (len 71)", "time dim (len 3)"] + + def setup(self, file_type, three_d, three_times): + if file_type == "LFRic": + # Generate an appropriate synthetic LFRic file. + if three_times: + n_times = 3 + else: + n_times = 1 + + # Use a cubesphere size ~equivalent to our UM test data. + cells_per_panel_edge = _N_CUBESPHERE_UM_EQUIVALENT + create_kwargs = dict(c_size=cells_per_panel_edge, n_times=n_times) + + if three_d: + create_kwargs["n_levels"] = 71 + + # Will re-use a file if already present. + file_path = make_cubesphere_testfile(**create_kwargs) + + else: + # Locate the appropriate UM file. 
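+            # The names built below follow the test data convention
+            # 'umglaa_p[ab]<NNN>-<diagnostic>', e.g. 'umglaa_pa000-surfacetemp'
+            # for the 2D, single-time case.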
+ if three_times: + # pa/pb003 files + numeric = "003" + else: + # pa/pb000 files + numeric = "000" + + if three_d: + # theta diagnostic, N1280 file w/ 71 levels (1920, 2560, 71) + file_name = f"umglaa_pb{numeric}-theta" + else: + # surface_temp diagnostic, N1280 file (1920, 2560) + file_name = f"umglaa_pa{numeric}-surfacetemp" + + file_suffices = { + "UM": "", # packed FF (WGDOS lbpack = 1) + "UM_lbpack0": ".uncompressed", # unpacked FF (lbpack = 0) + "UM_netcdf": ".nc", # UM file -> Iris -> NetCDF file + } + suffix = file_suffices[file_type] + + file_path = (BENCHMARK_DATA / file_name).with_suffix(suffix) + if not file_path.exists(): + message = "\n".join( + [ + f"Expected local file not found: {file_path}", + "Available from the UK Met Office.", + ] + ) + raise FileNotFoundError(message) + + self.file_path = file_path + self.file_type = file_type + + def load(self): + with PARSE_UGRID_ON_LOAD.context(): + return load_cube(str(self.file_path)) diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py new file mode 100644 index 0000000000..47eb255513 --- /dev/null +++ b/benchmarks/benchmarks/cperf/equality.py @@ -0,0 +1,58 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import SingleDiagnosticMixin +from .. import on_demand_benchmark + + +class EqualityMixin(SingleDiagnosticMixin): + """ + Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing + :class:`~iris.cube.Cube`\\ s that have been loaded from file. + """ + + # Cut down the parent parameters. + params = [["LFRic", "UM"]] + + def setup(self, file_type, three_d=False, three_times=False): + super().setup(file_type, three_d, three_times) + self.cube = self.load() + self.other_cube = self.load() + + +@on_demand_benchmark +class CubeEquality(EqualityMixin): + """ + Benchmark time and memory costs of comparing LFRic and UM + :class:`~iris.cube.Cube`\\ s. + """ + + def _comparison(self): + _ = self.cube == self.other_cube + + def peakmem_eq(self, file_type): + self._comparison() + + def time_eq(self, file_type): + self._comparison() + + +@on_demand_benchmark +class MeshEquality(EqualityMixin): + """Provides extra context for :class:`CubeEquality`.""" + + params = [["LFRic"]] + + def _comparison(self): + _ = self.cube.mesh == self.other_cube.mesh + + def peakmem_eq(self, file_type): + self._comparison() + + def time_eq(self, file_type): + self._comparison() diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py new file mode 100644 index 0000000000..04bb7e1a61 --- /dev/null +++ b/benchmarks/benchmarks/cperf/load.py @@ -0,0 +1,57 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import SingleDiagnosticMixin +from .. import on_demand_benchmark + + +@on_demand_benchmark +class SingleDiagnosticLoad(SingleDiagnosticMixin): + def time_load(self, _, __, ___): + """ + The 'real world comparison' + * UM coords are always realised (DimCoords). + * LFRic coords are not realised by default (MeshCoords). 
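+
+        The assertions below pin these laziness expectations, so the
+        benchmark fails loudly if default behaviour ever changes, rather
+        than silently timing a different operation.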
+ + """ + cube = self.load() + assert cube.has_lazy_data() + # UM files load lon/lat as DimCoords, which are always realised. + expecting_lazy_coords = self.file_type == "LFRic" + for coord_name in "longitude", "latitude": + coord = cube.coord(coord_name) + assert coord.has_lazy_points() == expecting_lazy_coords + assert coord.has_lazy_bounds() == expecting_lazy_coords + + def time_load_w_realised_coords(self, _, __, ___): + """A valuable extra comparison where both UM and LFRic coords are realised.""" + cube = self.load() + for coord_name in "longitude", "latitude": + coord = cube.coord(coord_name) + # Don't touch actual points/bounds objects - permanent + # realisation plays badly with ASV's re-run strategy. + if coord.has_lazy_points(): + coord.core_points().compute() + if coord.has_lazy_bounds(): + coord.core_bounds().compute() + + +@on_demand_benchmark +class SingleDiagnosticRealise(SingleDiagnosticMixin): + # The larger files take a long time to realise. + timeout = 600.0 + + def setup(self, file_type, three_d, three_times): + super().setup(file_type, three_d, three_times) + self.loaded_cube = self.load() + + def time_realise(self, _, __, ___): + # Don't touch loaded_cube.data - permanent realisation plays badly with + # ASV's re-run strategy. + assert self.loaded_cube.has_lazy_data() + self.loaded_cube.core_data().compute() diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py new file mode 100644 index 0000000000..2eb60e2ab5 --- /dev/null +++ b/benchmarks/benchmarks/cperf/save.py @@ -0,0 +1,47 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +""" + +from iris import save + +from . import _N_CUBESPHERE_UM_EQUIVALENT, _UM_DIMS_YX +from .. import TrackAddedMemoryAllocation, on_demand_benchmark +from ..generate_data.ugrid import ( + make_cube_like_2d_cubesphere, + make_cube_like_umfield, +) + + +@on_demand_benchmark +class NetcdfSave: + """ + Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. + Parametrised by file type. + + """ + + params = ["LFRic", "UM"] + param_names = ["data type"] + + def setup(self, data_type): + if data_type == "LFRic": + self.cube = make_cube_like_2d_cubesphere( + n_cube=_N_CUBESPHERE_UM_EQUIVALENT, with_mesh=True + ) + else: + self.cube = make_cube_like_umfield(_UM_DIMS_YX) + + def _save_data(self, cube): + save(cube, "tmp.nc") + + def time_save_data_netcdf(self, data_type): + self._save_data(self.cube) + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save_data_netcdf(self, data_type): + self._save_data(self.cube) diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index 3cfa6b248b..5889ce872b 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -10,11 +10,13 @@ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import analysis, aux_factory, coords, cube +from . 
import ARTIFICIAL_DIM_SIZE, disable_repeat_between_setup +from .generate_data.stock import sample_meshcoord -def setup(): + +def setup(*params): """General variables needed by multiple benchmark classes.""" global data_1d global data_2d @@ -66,10 +68,6 @@ def time_add(self): general_cube_copy = general_cube.copy(data=data_2d) self.add_method(general_cube_copy, *self.add_args) - def time_return(self): - """Return a cube that includes an instance of the benchmarked component.""" - self.cube - class Cube: def time_basic(self): @@ -170,6 +168,41 @@ def setup(self): self.setup_common() +class MeshCoord: + params = [ + 6, # minimal cube-sphere + int(1e6), # realistic cube-sphere size + ARTIFICIAL_DIM_SIZE, # To match size in :class:`AuxCoord` + ] + param_names = ["number of faces"] + + def setup(self, n_faces): + mesh_kwargs = dict( + n_nodes=n_faces + 2, n_edges=n_faces * 2, n_faces=n_faces + ) + + self.mesh_coord = sample_meshcoord(sample_mesh_kwargs=mesh_kwargs) + self.data = np.zeros(n_faces) + self.cube_blank = cube.Cube(data=self.data) + self.cube = self.create() + + def create(self): + return cube.Cube( + data=self.data, aux_coords_and_dims=[(self.mesh_coord, 0)] + ) + + def time_create(self, n_faces): + _ = self.create() + + @disable_repeat_between_setup + def time_add(self, n_faces): + self.cube_blank.add_aux_coord(self.mesh_coord, 0) + + @disable_repeat_between_setup + def time_remove(self, n_faces): + self.cube.remove_coord(self.mesh_coord) + + class Merge: def setup(self): self.cube_list = cube.CubeList() diff --git a/benchmarks/benchmarks/experimental/__init__.py b/benchmarks/benchmarks/experimental/__init__.py new file mode 100644 index 0000000000..f16e400bce --- /dev/null +++ b/benchmarks/benchmarks/experimental/__init__.py @@ -0,0 +1,9 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmark tests for the experimental module. + +""" diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py new file mode 100644 index 0000000000..2f9bb04e35 --- /dev/null +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -0,0 +1,191 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmark tests for the experimental.ugrid module. + +""" + +from copy import deepcopy + +import numpy as np + +from iris.experimental import ugrid + +from ... import ARTIFICIAL_DIM_SIZE, disable_repeat_between_setup +from ...generate_data.stock import sample_mesh + + +class UGridCommon: + """ + A base class running a generalised suite of benchmarks for any ugrid object. + Object to be specified in a subclass. + + ASV will run the benchmarks within this class for any subclasses. + + ASV will not benchmark this class as setup() triggers a NotImplementedError. + (ASV has not yet released ABC/abstractmethod support - asv#838). + + """ + + params = [ + 6, # minimal cube-sphere + int(1e6), # realistic cube-sphere size + ] + param_names = ["number of faces"] + + def setup(self, *params): + self.object = self.create() + + def create(self): + raise NotImplementedError + + def time_create(self, *params): + """Create an instance of the benchmarked object. 
create() method is
+        specified in the subclass."""
+        self.create()
+
+
+class Connectivity(UGridCommon):
+    def setup(self, n_faces):
+        self.array = np.zeros([n_faces, 3], dtype=int)
+        super().setup(n_faces)
+
+    def create(self):
+        return ugrid.Connectivity(
+            indices=self.array, cf_role="face_node_connectivity"
+        )
+
+    def time_indices(self, n_faces):
+        _ = self.object.indices
+
+    def time_location_lengths(self, n_faces):
+        # Proofed against the Connectivity name change (633ed17).
+        if getattr(self.object, "src_lengths", False):
+            meth = self.object.src_lengths
+        else:
+            meth = self.object.location_lengths
+        _ = meth()
+
+    def time_validate_indices(self, n_faces):
+        self.object.validate_indices()
+
+
+@disable_repeat_between_setup
+class ConnectivityLazy(Connectivity):
+    """Lazy equivalent of :class:`Connectivity`."""
+
+    def setup(self, n_faces):
+        super().setup(n_faces)
+        self.array = self.object.lazy_indices()
+        self.object = self.create()
+
+
+class Mesh(UGridCommon):
+    def setup(self, n_faces, lazy=False):
+        ####
+        # Steal everything from the sample mesh for benchmarking creation of a
+        # brand new mesh.
+        source_mesh = sample_mesh(
+            n_nodes=n_faces + 2,
+            n_edges=n_faces * 2,
+            n_faces=n_faces,
+            lazy_values=lazy,
+        )
+
+        def get_coords_and_axes(location):
+            search_kwargs = {f"include_{location}s": True}
+            return [
+                (source_mesh.coord(axis=axis, **search_kwargs), axis)
+                for axis in ("x", "y")
+            ]
+
+        self.mesh_kwargs = dict(
+            topology_dimension=source_mesh.topology_dimension,
+            node_coords_and_axes=get_coords_and_axes("node"),
+            connectivities=source_mesh.connectivities(),
+            edge_coords_and_axes=get_coords_and_axes("edge"),
+            face_coords_and_axes=get_coords_and_axes("face"),
+        )
+        ####
+
+        super().setup(n_faces)
+
+        self.face_node = self.object.face_node_connectivity
+        self.node_x = self.object.node_coords.node_x
+        # Kwargs for reuse in search and remove methods.
+        self.connectivities_kwarg = dict(cf_role="edge_node_connectivity")
+        self.coords_kwarg = dict(include_faces=True)
+
+        # TODO: an opportunity for speeding up runtime if needed, since
+        # eq_object is not needed for all benchmarks. Just don't generate it
+        # within a benchmark - the execution time is large enough that it
+        # could be a significant portion of the benchmark - makes regressions
+        # smaller and could even pick up regressions in copying instead!
+        self.eq_object = deepcopy(self.object)
+
+    def create(self):
+        return ugrid.Mesh(**self.mesh_kwargs)
+
+    def time_add_connectivities(self, n_faces):
+        self.object.add_connectivities(self.face_node)
+
+    def time_add_coords(self, n_faces):
+        self.object.add_coords(node_x=self.node_x)
+
+    def time_connectivities(self, n_faces):
+        _ = self.object.connectivities(**self.connectivities_kwarg)
+
+    def time_coords(self, n_faces):
+        _ = self.object.coords(**self.coords_kwarg)
+
+    def time_eq(self, n_faces):
+        _ = self.object == self.eq_object
+
+    def time_remove_connectivities(self, n_faces):
+        self.object.remove_connectivities(**self.connectivities_kwarg)
+
+    def time_remove_coords(self, n_faces):
+        self.object.remove_coords(**self.coords_kwarg)
+
+
+@disable_repeat_between_setup
+class MeshLazy(Mesh):
+    """Lazy equivalent of :class:`Mesh`."""
+
+    def setup(self, n_faces, lazy=True):
+        super().setup(n_faces, lazy=lazy)
+
+
+class MeshCoord(UGridCommon):
+    # Add extra parameter value to match AuxCoord benchmarking.
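+    # (The parameter list therefore becomes [6, 1e6, ARTIFICIAL_DIM_SIZE],
+    # so MeshCoord timings can be read directly against the AuxCoord
+    # benchmarks in benchmarks.coords.)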
+ params = UGridCommon.params + [ARTIFICIAL_DIM_SIZE] + + def setup(self, n_faces, lazy=False): + self.mesh = sample_mesh( + n_nodes=n_faces + 2, + n_edges=n_faces * 2, + n_faces=n_faces, + lazy_values=lazy, + ) + + super().setup(n_faces) + + def create(self): + return ugrid.MeshCoord(mesh=self.mesh, location="face", axis="x") + + def time_points(self, n_faces): + _ = self.object.points + + def time_bounds(self, n_faces): + _ = self.object.bounds + + +@disable_repeat_between_setup +class MeshCoordLazy(MeshCoord): + """Lazy equivalent of :class:`MeshCoord`.""" + + def setup(self, n_faces, lazy=True): + super().setup(n_faces, lazy=lazy) diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py new file mode 100644 index 0000000000..3b2d77a80a --- /dev/null +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -0,0 +1,250 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmarks stages of operation of the function +:func:`iris.experimental.ugrid.utils.recombine_submeshes`. + +Where possible benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. + +""" +import os + +import dask.array as da +import numpy as np + +from iris import load, load_cube, save +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD +from iris.experimental.ugrid.utils import recombine_submeshes + +from ... import TrackAddedMemoryAllocation +from ...generate_data.ugrid import make_cube_like_2d_cubesphere + + +class MixinCombineRegions: + # Characterise time taken + memory-allocated, for various stages of combine + # operations on cubesphere-like test data. + params = [4, 500] + param_names = ["cubesphere-N"] + + def _parametrised_cache_filename(self, n_cubesphere, content_name): + return f"cube_C{n_cubesphere}_{content_name}.nc" + + def _make_region_cubes(self, full_mesh_cube): + """Make a fixed number of region cubes from a full meshcube.""" + # Divide the cube into regions. + n_faces = full_mesh_cube.shape[-1] + # Start with a simple list of face indices + # first extend to multiple of 5 + n_faces_5s = 5 * ((n_faces + 1) // 5) + i_faces = np.arange(n_faces_5s, dtype=int) + # reshape (5N,) to (N, 5) + i_faces = i_faces.reshape((n_faces_5s // 5, 5)) + # reorder [2, 3, 4, 0, 1] within each block of 5 + i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1) + # flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...] + i_faces = i_faces.flatten() + # reduce back to orignal length, wrap any overflows into valid range + i_faces = i_faces[:n_faces] % n_faces + + # Divide into regions -- always slightly uneven, since 7 doesn't divide + n_regions = 7 + n_facesperregion = n_faces // n_regions + i_face_regions = (i_faces // n_facesperregion) % n_regions + region_inds = [ + np.where(i_face_regions == i_region)[0] + for i_region in range(n_regions) + ] + # NOTE: this produces 7 regions, with near-adjacent value ranges but + # with some points "moved" to an adjacent region. + # Also, region-0 is bigger (because of not dividing by 7). + + # Finally, make region cubes with these indices. 
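+        # (Worked small case, for illustration: with n_faces=10 the shuffle
+        # above gives i_faces == [2 3 4 0 1 7 8 9 5 6].)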
+ region_cubes = [full_mesh_cube[..., inds] for inds in region_inds] + return region_cubes + + def setup_cache(self): + """Cache all the necessary source data on disk.""" + + # Control dask, to minimise memory usage + allow largest data. + self.fix_dask_settings() + + for n_cubesphere in self.params: + # Do for each parameter, since "setup_cache" is NOT parametrised + mesh_cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=True + ) + # Save to files which include the parameter in the names. + save( + mesh_cube, + self._parametrised_cache_filename(n_cubesphere, "meshcube"), + ) + region_cubes = self._make_region_cubes(mesh_cube) + save( + region_cubes, + self._parametrised_cache_filename(n_cubesphere, "regioncubes"), + ) + + def setup( + self, n_cubesphere, imaginary_data=True, create_result_cube=True + ): + """ + The combine-tests "standard" setup operation. + + Load the source cubes (full-mesh + region) from disk. + These are specific to the cubesize parameter. + The data is cached on disk rather than calculated, to avoid any + pre-loading of the process memory allocation. + + If 'imaginary_data' is set (default), the region cubes data is replaced + with lazy data in the form of a da.zeros(). Otherwise, the region data + is lazy data from the files. + + If 'create_result_cube' is set, create "self.combined_cube" containing + the (still lazy) result. + + NOTE: various test classes override + extend this. + + """ + + # Load source cubes (full-mesh and regions) + with PARSE_UGRID_ON_LOAD.context(): + self.full_mesh_cube = load_cube( + self._parametrised_cache_filename(n_cubesphere, "meshcube") + ) + self.region_cubes = load( + self._parametrised_cache_filename(n_cubesphere, "regioncubes") + ) + + # Remove all var-names from loaded cubes, which can otherwise cause + # problems. Also implement 'imaginary' data. + for cube in self.region_cubes + [self.full_mesh_cube]: + cube.var_name = None + for coord in cube.coords(): + coord.var_name = None + if imaginary_data: + # Replace cube data (lazy file data) with 'imaginary' data. + # This has the same lazy-array attributes, but is allocated by + # creating chunks on demand instead of loading from file. + data = cube.lazy_data() + data = da.zeros( + data.shape, dtype=data.dtype, chunks=data.chunksize + ) + cube.data = data + + if create_result_cube: + self.recombined_cube = self.recombine() + + # Fix dask usage mode for all the subsequent performance tests. + self.fix_dask_settings() + + def fix_dask_settings(self): + """ + Fix "standard" dask behaviour for time+space testing. + + Currently this is single-threaded mode, with known chunksize, + which is optimised for space saving so we can test largest data. + + """ + + import dask.config as dcfg + + # Use single-threaded, to avoid process-switching costs and minimise memory usage. + # N.B. generally may be slower, but use less memory ? + dcfg.set(scheduler="single-threaded") + # Configure iris._lazy_data.as_lazy_data to aim for 100Mb chunks + dcfg.set({"array.chunk-size": "128Mib"}) + + def recombine(self): + # A handy general shorthand for the main "combine" operation. + result = recombine_submeshes( + self.full_mesh_cube, + self.region_cubes, + index_coord_name="i_mesh_face", + ) + return result + + +class CombineRegionsCreateCube(MixinCombineRegions): + """ + Time+memory costs of creating a combined-regions cube. + + The result is lazy, and we don't do the actual calculation. + + """ + + def setup(self, n_cubesphere): + # In this case only, do *not* create the result cube. 
+ # That is the operation we want to test. + super().setup(n_cubesphere, create_result_cube=False) + + def time_create_combined_cube(self, n_cubesphere): + self.recombine() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_create_combined_cube(self, n_cubesphere): + self.recombine() + + +class CombineRegionsComputeRealData(MixinCombineRegions): + """ + Time+memory costs of computing combined-regions data. + """ + + def time_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + +class CombineRegionsSaveData(MixinCombineRegions): + """ + Test saving *only*, having replaced the input cube data with 'imaginary' + array data, so that input data is not loaded from disk during the save + operation. + + """ + + def time_save(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + save(self.recombined_cube, "tmp.nc") + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save(self, n_cubesphere): + save(self.recombined_cube, "tmp.nc") + + def track_filesize_saved(self, n_cubesphere): + save(self.recombined_cube, "tmp.nc") + return os.path.getsize("tmp.nc") * 1.0e-6 + + +CombineRegionsSaveData.track_filesize_saved.unit = "Mb" + + +class CombineRegionsFileStreamedCalc(MixinCombineRegions): + """ + Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region + cubes on disk. + """ + + def setup(self, n_cubesphere): + # In this case only, do *not* replace the loaded regions data with + # 'imaginary' data, as we want to test file-to-file calculation+save. + super().setup(n_cubesphere, imaginary_data=False) + + def time_stream_file2file(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + save(self.recombined_cube, "tmp.nc") + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_stream_file2file(self, n_cubesphere): + save(self.recombined_cube, "tmp.nc") diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py new file mode 100644 index 0000000000..78b971d9de --- /dev/null +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -0,0 +1,123 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Scripts for generating supporting data for benchmarking. + +Data generated using Iris should use :func:`run_function_elsewhere`, which +means that data is generated using a fixed version of Iris and a fixed +environment, rather than those that get changed when the benchmarking run +checks out a new commit. + +Downstream use of data generated 'elsewhere' requires saving; usually in a +NetCDF file. Could also use pickling but there is a potential risk if the +benchmark sequence runs over two different Python versions. + +""" +from contextlib import contextmanager +from inspect import getsource +from os import environ +from pathlib import Path +from subprocess import CalledProcessError, check_output, run +from textwrap import dedent +from warnings import warn + +from iris._lazy_data import as_concrete_data +from iris.fileformats import netcdf + +#: Python executable used by :func:`run_function_elsewhere`, set via env +#: variable of same name. 
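+#: For example (a hypothetical path):
+#: DATA_GEN_PYTHON=~/conda/envs/iris-datagen/bin/python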
+#: Must be path of Python within an environment that
+#: includes Iris (including dependencies and test modules) and Mule.
+try:
+    DATA_GEN_PYTHON = environ["DATA_GEN_PYTHON"]
+    _ = check_output([DATA_GEN_PYTHON, "-c", "a = True"])
+except KeyError:
+    error = "Env variable DATA_GEN_PYTHON not defined."
+    raise KeyError(error)
+except (CalledProcessError, FileNotFoundError, PermissionError):
+    error = (
+        "Env variable DATA_GEN_PYTHON not a runnable python executable path."
+    )
+    raise ValueError(error)
+
+# The default location of data files used in benchmarks. Used by CI.
+default_data_dir = (Path(__file__).parents[2] / ".data").resolve()
+# Optionally override the default data location with environment variable.
+BENCHMARK_DATA = Path(environ.get("BENCHMARK_DATA", default_data_dir))
+if BENCHMARK_DATA == default_data_dir:
+    BENCHMARK_DATA.mkdir(exist_ok=True)
+    message = (
+        f"No BENCHMARK_DATA env var, defaulting to {BENCHMARK_DATA}. "
+        "Note that some benchmark files are GB in size."
+    )
+    warn(message)
+elif not BENCHMARK_DATA.is_dir():
+    message = f"Not a directory: {BENCHMARK_DATA} ."
+    raise ValueError(message)
+
+# Manual flag to allow the rebuilding of synthetic data.
+# Set False to force a benchmark run to re-make all the data files.
+REUSE_DATA = True
+
+
+def run_function_elsewhere(func_to_run, *args, **kwargs):
+    """
+    Run a given function using the :const:`DATA_GEN_PYTHON` executable.
+
+    This structure allows the function to be written natively.
+
+    Parameters
+    ----------
+    func_to_run : FunctionType
+        The function object to be run.
+        NOTE: the function must be completely self-contained, i.e. perform all
+        its own imports (within the target :const:`DATA_GEN_PYTHON`
+        environment).
+    *args : tuple, optional
+        Function call arguments. Must all be expressible as simple literals,
+        i.e. the ``repr`` must be a valid literal expression.
+    **kwargs: dict, optional
+        Function call keyword arguments. All values must be expressible as
+        simple literals (see ``*args``).
+
+    Returns
+    -------
+    bytes
+        The ``stdout`` from the run.
+
+    """
+    func_string = dedent(getsource(func_to_run))
+    func_string = func_string.replace("@staticmethod\n", "")
+    func_call_term_strings = [repr(arg) for arg in args]
+    func_call_term_strings += [
+        f"{name}={repr(val)}" for name, val in kwargs.items()
+    ]
+    func_call_string = (
+        f"{func_to_run.__name__}(" + ",".join(func_call_term_strings) + ")"
+    )
+    python_string = "\n".join([func_string, func_call_string])
+    result = run(
+        [DATA_GEN_PYTHON, "-c", python_string], capture_output=True, check=True
+    )
+    return result.stdout
+
+
+@contextmanager
+def load_realised():
+    """
+    Force NetCDF loading with realised arrays.
+
+    Data is passed between the data generation and benchmarking environments
+    via file loading, but some benchmarks are only meaningful if they start
+    from real (non-lazy) arrays.
+    """
+    from iris.fileformats.netcdf import _get_cf_var_data as pre_patched
+
+    def patched(cf_var, filename):
+        return as_concrete_data(pre_patched(cf_var, filename))
+
+    netcdf._get_cf_var_data = patched
+    try:
+        yield netcdf
+    finally:
+        # Always restore the original loader, even if the caller raises.
+        netcdf._get_cf_var_data = pre_patched
diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py
new file mode 100644
index 0000000000..eaf46bb405
--- /dev/null
+++ b/benchmarks/benchmarks/generate_data/stock.py
@@ -0,0 +1,166 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+""" +Wrappers for using :mod:`iris.tests.stock` methods for benchmarking. + +See :mod:`benchmarks.generate_data` for an explanation of this structure. +""" + +from hashlib import sha256 +import json +from pathlib import Path + +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD, load_mesh + +from . import BENCHMARK_DATA, REUSE_DATA, load_realised, run_function_elsewhere + + +def hash_args(*args, **kwargs): + """Convert arguments into a short hash - for preserving args in filenames.""" + arg_string = str(args) + kwarg_string = json.dumps(kwargs) + full_string = arg_string + kwarg_string + return sha256(full_string.encode()).hexdigest()[:10] + + +def _create_file__xios_common(func_name, **kwargs): + def _external(func_name_, temp_file_dir, **kwargs_): + from iris.tests.stock import netcdf + + func = getattr(netcdf, func_name_) + print(func(temp_file_dir, **kwargs_), end="") + + args_hash = hash_args(**kwargs) + save_path = (BENCHMARK_DATA / f"{func_name}_{args_hash}").with_suffix( + ".nc" + ) + if not REUSE_DATA or not save_path.is_file(): + # The xios functions take control of save location so need to move to + # a more specific name that allows re-use. + actual_path = run_function_elsewhere( + _external, + func_name_=func_name, + temp_file_dir=str(BENCHMARK_DATA), + **kwargs, + ) + Path(actual_path.decode()).replace(save_path) + return save_path + + +def create_file__xios_2d_face_half_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1 +): + """ + Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. + + Have taken control of temp_file_dir + + todo: is create_file__xios_2d_face_half_levels still appropriate now we can + properly save Mesh Cubes? + """ + + return _create_file__xios_common( + func_name="create_file__xios_2d_face_half_levels", + dataset_name=dataset_name, + n_faces=n_faces, + n_times=n_times, + ) + + +def create_file__xios_3d_face_half_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38 +): + """ + Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. + + Have taken control of temp_file_dir + + todo: is create_file__xios_3d_face_half_levels still appropriate now we can + properly save Mesh Cubes? + """ + + return _create_file__xios_common( + func_name="create_file__xios_3d_face_half_levels", + dataset_name=dataset_name, + n_faces=n_faces, + n_times=n_times, + n_levels=n_levels, + ) + + +def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): + """Wrapper for :meth:iris.tests.stock.mesh.sample_mesh`.""" + + def _external(*args, **kwargs): + from iris.experimental.ugrid import save_mesh + from iris.tests.stock.mesh import sample_mesh + + save_path_ = kwargs.pop("save_path") + # Always saving, so laziness is irrelevant. Use lazy to save time. + kwargs["lazy_values"] = True + new_mesh = sample_mesh(*args, **kwargs) + save_mesh(new_mesh, save_path_) + + arg_list = [n_nodes, n_faces, n_edges] + args_hash = hash_args(*arg_list) + save_path = (BENCHMARK_DATA / f"sample_mesh_{args_hash}").with_suffix( + ".nc" + ) + if not REUSE_DATA or not save_path.is_file(): + _ = run_function_elsewhere( + _external, *arg_list, save_path=str(save_path) + ) + with PARSE_UGRID_ON_LOAD.context(): + if not lazy_values: + # Realise everything. 
+            with load_realised():
+                mesh = load_mesh(str(save_path))
+        else:
+            mesh = load_mesh(str(save_path))
+    return mesh
+
+
+def sample_meshcoord(sample_mesh_kwargs=None, location="face", axis="x"):
+    """
+    Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`.
+
+    Parameters deviate from the original, as a
+    :class:`iris.experimental.ugrid.Mesh` cannot be passed to the separate
+    Python instance - the Mesh must be generated there as well.
+
+    MeshCoords cannot be saved to file, so the _external method saves the
+    MeshCoord's Mesh, then the original Python instance loads in that Mesh and
+    regenerates the MeshCoord from there.
+    """
+
+    def _external(sample_mesh_kwargs_, save_path_):
+        from iris.experimental.ugrid import save_mesh
+        from iris.tests.stock.mesh import sample_mesh, sample_meshcoord
+
+        if sample_mesh_kwargs_:
+            input_mesh = sample_mesh(**sample_mesh_kwargs_)
+        else:
+            input_mesh = None
+        # Don't parse the location or axis arguments - only saving the Mesh at
+        # this stage.
+        new_meshcoord = sample_meshcoord(mesh=input_mesh)
+        save_mesh(new_meshcoord.mesh, save_path_)
+
+    # sample_mesh_kwargs defaults to None, so guard the unpacking.
+    args_hash = hash_args(**(sample_mesh_kwargs or {}))
+    save_path = (
+        BENCHMARK_DATA / f"sample_mesh_coord_{args_hash}"
+    ).with_suffix(".nc")
+    if not REUSE_DATA or not save_path.is_file():
+        _ = run_function_elsewhere(
+            _external,
+            sample_mesh_kwargs_=sample_mesh_kwargs,
+            save_path_=str(save_path),
+        )
+    with PARSE_UGRID_ON_LOAD.context():
+        with load_realised():
+            source_mesh = load_mesh(str(save_path))
+    # Regenerate MeshCoord from its Mesh, which we saved.
+    return source_mesh.to_MeshCoord(location=location, axis=axis)
diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py
new file mode 100644
index 0000000000..527b49a6bb
--- /dev/null
+++ b/benchmarks/benchmarks/generate_data/ugrid.py
@@ -0,0 +1,195 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Scripts for generating supporting data for UGRID-related benchmarking.
+"""
+from iris import load_cube as iris_loadcube
+from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD
+
+from . import BENCHMARK_DATA, REUSE_DATA, load_realised, run_function_elsewhere
+from .stock import (
+    create_file__xios_2d_face_half_levels,
+    create_file__xios_3d_face_half_levels,
+)
+
+
+def generate_cube_like_2d_cubesphere(
+    n_cube: int, with_mesh: bool, output_path: str
+):
+    """
+    Construct and save to file an LFRic cubesphere-like cube for a given
+    cubesphere size, *or* a simpler structured (UM-like) cube of equivalent
+    size.
+
+    NOTE: this function is *NEVER* called from within this actual package.
+    Instead, it is to be called via benchmarks.remote_data_generation,
+    so that it can use up-to-date facilities, independent of the ASV controlled
+    environment which contains the "Iris commit under test".
+    This means:
+    * it must be completely self-contained, i.e. it includes all its
+      own imports, and saves results to an output file.
+
+    """
+    from iris import save
+    from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube
+
+    n_face_nodes = n_cube * n_cube
+    n_faces = 6 * n_face_nodes
+
+    # Set n_nodes=n_faces and n_edges=2*n_faces
+    # : Not exact, but similar to a 'real' cubesphere.
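+    # (For reference: a true C-N cubesphere has 6*N^2 faces, 12*N^2 edges and
+    # 6*N^2 + 2 nodes - from Euler's formula V - E + F = 2 - so
+    # n_edges = 2 * n_faces is exact and n_nodes = n_faces is out by only 2.)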
+ n_nodes = n_faces + n_edges = 2 * n_faces + if with_mesh: + mesh = sample_mesh( + n_nodes=n_nodes, n_faces=n_faces, n_edges=n_edges, lazy_values=True + ) + cube = sample_mesh_cube(mesh=mesh, n_z=1) + else: + cube = sample_mesh_cube(nomesh_faces=n_faces, n_z=1) + + # Strip off the 'extra' aux-coord mapping the mesh, which sample-cube adds + # but which we don't want. + cube.remove_coord("mesh_face_aux") + + # Save the result to a named file. + save(cube, output_path) + + +def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): + """ + Generate an LFRIc cubesphere-like cube for a given cubesphere size, + *or* a simpler structured (UM-like) cube of equivalent size. + + All the cube data, coords and mesh content are LAZY, and produced without + allocating large real arrays (to allow peak-memory testing). + + NOTE: the actual cube generation is done in a stable Iris environment via + benchmarks.remote_data_generation, so it is all channeled via cached netcdf + files in our common testdata directory. + + """ + identifying_filename = ( + f"cube_like_2d_cubesphere_C{n_cube}_Mesh={with_mesh}.nc" + ) + filepath = BENCHMARK_DATA / identifying_filename + if not filepath.exists(): + # Create the required testfile, by running the generation code remotely + # in a 'fixed' python environment. + run_function_elsewhere( + generate_cube_like_2d_cubesphere, + n_cube, + with_mesh=with_mesh, + output_path=str(filepath), + ) + + # File now *should* definitely exist: content is simply the desired cube. + with PARSE_UGRID_ON_LOAD.context(): + cube = iris_loadcube(str(filepath)) + + # Ensure correct laziness. + _ = cube.data + for coord in cube.coords(mesh_coords=False): + assert not coord.has_lazy_points() + assert not coord.has_lazy_bounds() + if cube.mesh: + for coord in cube.mesh.coords(): + assert coord.has_lazy_points() + for conn in cube.mesh.connectivities(): + assert conn.has_lazy_indices() + + return cube + + +def make_cube_like_umfield(xy_dims): + """ + Create a "UM-like" cube with lazy content, for save performance testing. + + Roughly equivalent to a single current UM cube, to be compared with + a "make_cube_like_2d_cubesphere(n_cube=_N_CUBESPHERE_UM_EQUIVALENT)" + (see below). + + Note: probably a bit over-simplified, as there is no time coord, but that + is probably equally true of our LFRic-style synthetic data. + + Args: + * xy_dims (2-tuple): + Set the horizontal dimensions = n-lats, n-lons. + + """ + + def _external(xy_dims_, save_path_): + from dask import array as da + import numpy as np + + from iris import save + from iris.coords import DimCoord + from iris.cube import Cube + + nz, ny, nx = (1,) + xy_dims_ + + # Base data : Note this is float32 not float64 like LFRic/XIOS outputs. + lazy_data = da.zeros((nz, ny, nx), dtype=np.float32) + cube = Cube(lazy_data, long_name="structured_phenom") + + # Add simple dim coords also. 
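+        # The coords below define a global, regular lat-lon grid, which is
+        # UM-like in layout though not in exact spacing.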
+ z_dimco = DimCoord(np.arange(nz), long_name="level", units=1) + y_dimco = DimCoord( + np.linspace(-90.0, 90.0, ny), + standard_name="latitude", + units="degrees", + ) + x_dimco = DimCoord( + np.linspace(-180.0, 180.0, nx), + standard_name="longitude", + units="degrees", + ) + for idim, co in enumerate([z_dimco, y_dimco, x_dimco]): + cube.add_dim_coord(co, idim) + + save(cube, save_path_) + + save_path = ( + BENCHMARK_DATA / f"make_cube_like_umfield_{xy_dims}" + ).with_suffix(".nc") + if not REUSE_DATA or not save_path.is_file(): + _ = run_function_elsewhere(_external, xy_dims, str(save_path)) + with PARSE_UGRID_ON_LOAD.context(): + with load_realised(): + cube = iris_loadcube(str(save_path)) + + return cube + + +def make_cubesphere_testfile(c_size, n_levels=0, n_times=1): + """ + Build a C cubesphere testfile in a given directory, with a standard naming. + If n_levels > 0 specified: 3d file with the specified number of levels. + Return the file path. + + todo: is create_file__xios... still appropriate now we can properly save + Mesh Cubes? + + """ + n_faces = 6 * c_size * c_size + stem_name = f"mesh_cubesphere_C{c_size}_t{n_times}" + kwargs = dict( + temp_file_dir=None, + dataset_name=stem_name, # N.B. function adds the ".nc" extension + n_times=n_times, + n_faces=n_faces, + ) + + three_d = n_levels > 0 + if three_d: + kwargs["n_levels"] = n_levels + kwargs["dataset_name"] += f"_{n_levels}levels" + func = create_file__xios_3d_face_half_levels + else: + func = create_file__xios_2d_face_half_levels + + file_path = func(**kwargs) + return file_path diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py new file mode 100644 index 0000000000..39773bbb4b --- /dev/null +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -0,0 +1,197 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Generate FF, PP and NetCDF files based on a minimal synthetic FF file. + +NOTE: uses the Mule package, so depends on an environment with Mule installed. +""" + + +def _create_um_files( + len_x: int, len_y: int, len_z: int, len_t: int, compress, save_paths: dict +) -> None: + """ + Generate an FF object of given shape and compression, save to FF/PP/NetCDF. + + This is run externally + (:func:`benchmarks.generate_data.run_function_elsewhere`), so all imports + are self-contained and input parameters are simple types. + """ + from copy import deepcopy + from datetime import datetime + from tempfile import NamedTemporaryFile + + from mule import ArrayDataProvider, Field3, FieldsFile + from mule.pp import fields_to_pp_file + import numpy as np + + from iris import load_cube + from iris import save as save_cube + + template = { + "fixed_length_header": {"dataset_type": 3, "grid_staggering": 3}, + "integer_constants": { + "num_p_levels": len_z, + "num_cols": len_x, + "num_rows": len_y, + }, + "real_constants": {}, + "level_dependent_constants": {"dims": (len_z + 1, None)}, + } + new_ff = FieldsFile.from_template(deepcopy(template)) + + data_array = np.arange(len_x * len_y).reshape(len_x, len_y) + array_provider = ArrayDataProvider(data_array) + + def add_field(level_: int, time_step_: int) -> None: + """ + Add a minimal field to the new :class:`~mule.FieldsFile`. 
+ + Includes the minimum information to allow Mule saving and Iris + loading, as well as incrementation for vertical levels and time + steps to allow generation of z and t dimensions. + """ + new_field = Field3.empty() + # To correspond to the header-release 3 class used. + new_field.lbrel = 3 + # Mule uses the first element of the lookup to test for + # unpopulated fields (and skips them), so the first element should + # be set to something. The year will do. + new_field.raw[1] = datetime.now().year + + # Horizontal. + new_field.lbcode = 1 + new_field.lbnpt = len_x + new_field.lbrow = len_y + new_field.bdx = new_ff.real_constants.col_spacing + new_field.bdy = new_ff.real_constants.row_spacing + new_field.bzx = new_ff.real_constants.start_lon - 0.5 * new_field.bdx + new_field.bzy = new_ff.real_constants.start_lat - 0.5 * new_field.bdy + + # Hemisphere. + new_field.lbhem = 32 + # Processing. + new_field.lbproc = 0 + + # Vertical. + # Hybrid height values by simulating sequences similar to those in a + # theta file. + new_field.lbvc = 65 + if level_ == 0: + new_field.lblev = 9999 + else: + new_field.lblev = level_ + + level_1 = level_ + 1 + six_rec = 20 / 3 + three_rec = six_rec / 2 + + new_field.blev = level_1**2 * six_rec - six_rec + new_field.brsvd1 = ( + level_1**2 * six_rec + (six_rec * level_1) - three_rec + ) + + brsvd2_simulated = np.linspace(0.995, 0, len_z) + shift = min(len_z, 2) + bhrlev_simulated = np.concatenate( + [np.ones(shift), brsvd2_simulated[:-shift]] + ) + new_field.brsvd2 = brsvd2_simulated[level_] + new_field.bhrlev = bhrlev_simulated[level_] + + # Time. + new_field.lbtim = 11 + + new_field.lbyr = time_step_ + for attr_name in ["lbmon", "lbdat", "lbhr", "lbmin", "lbsec"]: + setattr(new_field, attr_name, 0) + + new_field.lbyrd = time_step_ + 1 + for attr_name in ["lbmond", "lbdatd", "lbhrd", "lbmind", "lbsecd"]: + setattr(new_field, attr_name, 0) + + # Data and packing. + new_field.lbuser1 = 1 + new_field.lbpack = int(compress) + new_field.bacc = 0 + new_field.bmdi = -1 + new_field.lbext = 0 + new_field.set_data_provider(array_provider) + + new_ff.fields.append(new_field) + + for time_step in range(len_t): + for level in range(len_z): + add_field(level, time_step + 1) + + ff_path = save_paths.get("FF", None) + pp_path = save_paths.get("PP", None) + nc_path = save_paths.get("NetCDF", None) + + if ff_path: + new_ff.to_file(ff_path) + if pp_path: + fields_to_pp_file(str(pp_path), new_ff.fields) + if nc_path: + temp_ff_path = None + # Need an Iris Cube from the FF content. + if ff_path: + # Use the existing file. + ff_cube = load_cube(ff_path) + else: + # Make a temporary file. + temp_ff_path = NamedTemporaryFile() + new_ff.to_file(temp_ff_path.name) + ff_cube = load_cube(temp_ff_path.name) + + save_cube(ff_cube, nc_path, zlib=compress) + if temp_ff_path: + temp_ff_path.close() + + +FILE_EXTENSIONS = {"FF": "", "PP": ".pp", "NetCDF": ".nc"} + + +def create_um_files( + len_x: int, + len_y: int, + len_z: int, + len_t: int, + compress: bool, + file_types: list, +) -> dict: + """ + Generate FF-based FF / PP / NetCDF files with specified shape and compression. + + All files representing a given shape are saved in a dedicated directory. A + dictionary of the saved paths is returned. + + If the required files exist, they are re-used, unless + :const:`benchmarks.REUSE_DATA` is ``False``. + """ + # Self contained imports to avoid linting confusion with _create_um_files(). + from . 
import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere + + save_name_sections = ["UM", len_x, len_y, len_z, len_t] + save_name = "_".join(str(section) for section in save_name_sections) + save_dir = BENCHMARK_DATA / save_name + if not save_dir.is_dir(): + save_dir.mkdir(parents=True) + + save_paths = {} + files_exist = True + for file_type in file_types: + file_ext = FILE_EXTENSIONS[file_type] + save_path = (save_dir / f"{compress}").with_suffix(file_ext) + files_exist = files_exist and save_path.is_file() + save_paths[file_type] = str(save_path) + + if not REUSE_DATA or not files_exist: + _ = run_function_elsewhere( + _create_um_files, len_x, len_y, len_z, len_t, compress, save_paths + ) + + return save_paths diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 3e83ea8cfe..ad54c23122 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -3,240 +3,247 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -import sys +from importlib import import_module, reload class Iris: - warmup_time = 0 - number = 1 - repeat = 10 - - def setup(self): - self.before = set(sys.modules.keys()) - - def teardown(self): - after = set(sys.modules.keys()) - diff = after - self.before - for module in diff: - sys.modules.pop(module) + @staticmethod + def _import(module_name): + """ + Have experimented with adding sleep() commands into the imported + modules. The results reveal: + + ASV avoids invoking `import x` if nothing gets called in the + benchmark (some imports were timed, but only those where calls + happened during import). + + Using reload() is not identical to importing, but does produce + results that are very close to expected import times, so this is fine + for monitoring for regressions. + It is also ideal for accurate repetitions, without the need to mess + with the ASV `number` attribute etc, since cached imports are not used + and the repetitions are therefore no faster than the first run. 
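+
+        A caveat of reload(): sub-imports inside the reloaded module still
+        hit sys.modules, so dependency import costs are not re-measured -
+        only the module's own body is re-executed.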
+ """ + mod = import_module(module_name) + reload(mod) def time_iris(self): - import iris + self._import("iris") def time__concatenate(self): - import iris._concatenate + self._import("iris._concatenate") def time__constraints(self): - import iris._constraints + self._import("iris._constraints") def time__data_manager(self): - import iris._data_manager + self._import("iris._data_manager") def time__deprecation(self): - import iris._deprecation + self._import("iris._deprecation") def time__lazy_data(self): - import iris._lazy_data + self._import("iris._lazy_data") def time__merge(self): - import iris._merge + self._import("iris._merge") def time__representation(self): - import iris._representation + self._import("iris._representation") def time_analysis(self): - import iris.analysis + self._import("iris.analysis") def time_analysis__area_weighted(self): - import iris.analysis._area_weighted + self._import("iris.analysis._area_weighted") def time_analysis__grid_angles(self): - import iris.analysis._grid_angles + self._import("iris.analysis._grid_angles") def time_analysis__interpolation(self): - import iris.analysis._interpolation + self._import("iris.analysis._interpolation") def time_analysis__regrid(self): - import iris.analysis._regrid + self._import("iris.analysis._regrid") def time_analysis__scipy_interpolate(self): - import iris.analysis._scipy_interpolate + self._import("iris.analysis._scipy_interpolate") def time_analysis_calculus(self): - import iris.analysis.calculus + self._import("iris.analysis.calculus") def time_analysis_cartography(self): - import iris.analysis.cartography + self._import("iris.analysis.cartography") def time_analysis_geomerty(self): - import iris.analysis.geometry + self._import("iris.analysis.geometry") def time_analysis_maths(self): - import iris.analysis.maths + self._import("iris.analysis.maths") def time_analysis_stats(self): - import iris.analysis.stats + self._import("iris.analysis.stats") def time_analysis_trajectory(self): - import iris.analysis.trajectory + self._import("iris.analysis.trajectory") def time_aux_factory(self): - import iris.aux_factory + self._import("iris.aux_factory") def time_common(self): - import iris.common + self._import("iris.common") def time_common_lenient(self): - import iris.common.lenient + self._import("iris.common.lenient") def time_common_metadata(self): - import iris.common.metadata + self._import("iris.common.metadata") def time_common_mixin(self): - import iris.common.mixin + self._import("iris.common.mixin") def time_common_resolve(self): - import iris.common.resolve + self._import("iris.common.resolve") def time_config(self): - import iris.config + self._import("iris.config") def time_coord_categorisation(self): - import iris.coord_categorisation + self._import("iris.coord_categorisation") def time_coord_systems(self): - import iris.coord_systems + self._import("iris.coord_systems") def time_coords(self): - import iris.coords + self._import("iris.coords") def time_cube(self): - import iris.cube + self._import("iris.cube") def time_exceptions(self): - import iris.exceptions + self._import("iris.exceptions") def time_experimental(self): - import iris.experimental + self._import("iris.experimental") def time_fileformats(self): - import iris.fileformats + self._import("iris.fileformats") def time_fileformats__ff(self): - import iris.fileformats._ff + self._import("iris.fileformats._ff") def time_fileformats__ff_cross_references(self): - import iris.fileformats._ff_cross_references + 
self._import("iris.fileformats._ff_cross_references") def time_fileformats__pp_lbproc_pairs(self): - import iris.fileformats._pp_lbproc_pairs + self._import("iris.fileformats._pp_lbproc_pairs") def time_fileformats_structured_array_identification(self): - import iris.fileformats._structured_array_identification + self._import("iris.fileformats._structured_array_identification") def time_fileformats_abf(self): - import iris.fileformats.abf + self._import("iris.fileformats.abf") def time_fileformats_cf(self): - import iris.fileformats.cf + self._import("iris.fileformats.cf") def time_fileformats_dot(self): - import iris.fileformats.dot + self._import("iris.fileformats.dot") def time_fileformats_name(self): - import iris.fileformats.name + self._import("iris.fileformats.name") def time_fileformats_name_loaders(self): - import iris.fileformats.name_loaders + self._import("iris.fileformats.name_loaders") def time_fileformats_netcdf(self): - import iris.fileformats.netcdf + self._import("iris.fileformats.netcdf") def time_fileformats_nimrod(self): - import iris.fileformats.nimrod + self._import("iris.fileformats.nimrod") def time_fileformats_nimrod_load_rules(self): - import iris.fileformats.nimrod_load_rules + self._import("iris.fileformats.nimrod_load_rules") def time_fileformats_pp(self): - import iris.fileformats.pp + self._import("iris.fileformats.pp") def time_fileformats_pp_load_rules(self): - import iris.fileformats.pp_load_rules + self._import("iris.fileformats.pp_load_rules") def time_fileformats_pp_save_rules(self): - import iris.fileformats.pp_save_rules + self._import("iris.fileformats.pp_save_rules") def time_fileformats_rules(self): - import iris.fileformats.rules + self._import("iris.fileformats.rules") def time_fileformats_um(self): - import iris.fileformats.um + self._import("iris.fileformats.um") def time_fileformats_um__fast_load(self): - import iris.fileformats.um._fast_load + self._import("iris.fileformats.um._fast_load") def time_fileformats_um__fast_load_structured_fields(self): - import iris.fileformats.um._fast_load_structured_fields + self._import("iris.fileformats.um._fast_load_structured_fields") def time_fileformats_um__ff_replacement(self): - import iris.fileformats.um._ff_replacement + self._import("iris.fileformats.um._ff_replacement") def time_fileformats_um__optimal_array_structuring(self): - import iris.fileformats.um._optimal_array_structuring + self._import("iris.fileformats.um._optimal_array_structuring") def time_fileformats_um_cf_map(self): - import iris.fileformats.um_cf_map + self._import("iris.fileformats.um_cf_map") def time_io(self): - import iris.io + self._import("iris.io") def time_io_format_picker(self): - import iris.io.format_picker + self._import("iris.io.format_picker") def time_iterate(self): - import iris.iterate + self._import("iris.iterate") def time_palette(self): - import iris.palette + self._import("iris.palette") def time_plot(self): - import iris.plot + self._import("iris.plot") def time_quickplot(self): - import iris.quickplot + self._import("iris.quickplot") def time_std_names(self): - import iris.std_names + self._import("iris.std_names") def time_symbols(self): - import iris.symbols + self._import("iris.symbols") def time_tests(self): - import iris.tests + self._import("iris.tests") def time_time(self): - import iris.time + self._import("iris.time") def time_util(self): - import iris.util + self._import("iris.util") # third-party imports def time_third_party_cartopy(self): - import cartopy + self._import("cartopy") def 
time_third_party_cf_units(self): - import cf_units + self._import("cf_units") def time_third_party_cftime(self): - import cftime + self._import("cftime") def time_third_party_matplotlib(self): - import matplotlib + self._import("matplotlib") def time_third_party_numpy(self): - import numpy + self._import("numpy") def time_third_party_scipy(self): - import scipy + self._import("scipy") diff --git a/benchmarks/benchmarks/iterate.py b/benchmarks/benchmarks/iterate.py index 20422750ef..0a5415ac2b 100644 --- a/benchmarks/benchmarks/iterate.py +++ b/benchmarks/benchmarks/iterate.py @@ -9,9 +9,10 @@ """ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords, cube, iterate +from . import ARTIFICIAL_DIM_SIZE + def setup(): """General variables needed by multiple benchmark classes.""" diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py new file mode 100644 index 0000000000..1b0ea696f6 --- /dev/null +++ b/benchmarks/benchmarks/load/__init__.py @@ -0,0 +1,187 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmark tests. + +Where applicable benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. Size should be _just_ large + enough - don't want to bloat benchmark runtime. + +""" + +from iris import AttributeConstraint, Constraint, load, load_cube +from iris.cube import Cube +from iris.fileformats.um import structured_um_loading + +from ..generate_data import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere +from ..generate_data.um_files import create_um_files + + +class LoadAndRealise: + # For data generation + timeout = 600.0 + params = [ + [(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], + [False, True], + ["FF", "PP", "NetCDF"], + ] + param_names = ["xyz", "compressed", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[2] + file_path_dict = {} + for xyz in self.params[0]: + file_path_dict[xyz] = {} + x, y, z = xyz + for compress in self.params[1]: + file_path_dict[xyz][compress] = create_um_files( + x, y, z, 1, compress, file_type_args + ) + return file_path_dict + + def setup( + self, + file_path_dict: dict, + xyz: tuple, + compress: bool, + file_format: str, + ) -> None: + self.file_path = file_path_dict[xyz][compress][file_format] + self.cube = self.load() + + def load(self) -> Cube: + return load_cube(self.file_path) + + def time_load(self, _, __, ___, ____) -> None: + _ = self.load() + + def time_realise(self, _, __, ___, ____) -> None: + # Don't touch cube.data - permanent realisation plays badly with ASV's + # re-run strategy. + assert self.cube.has_lazy_data() + self.cube.core_data().compute() + + +class STASHConstraint: + # xyz sizes mimic LoadAndRealise to maximise file re-use. 
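+    # NOTE: ASV is understood to run ``setup_cache`` once per class rather
+    # than once per parameter combination, so generating the files keyed on
+    # xyz here lets every (xyz, file_format) case below share them.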
+ params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]] + param_names = ["xyz", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files( + x, y, z, 1, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, xyz: tuple, file_format: str + ) -> None: + self.file_path = file_path_dict[xyz][file_format] + + def time_stash_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, AttributeConstraint(STASH="m??s??i901")) + + +class TimeConstraint: + params = [[3, 20], ["FF", "PP", "NetCDF"]] + param_names = ["time_dim_len", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for time_dim_len in self.params[0]: + file_path_dict[time_dim_len] = create_um_files( + 20, 20, 5, time_dim_len, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, time_dim_len: int, file_format: str + ) -> None: + self.file_path = file_path_dict[time_dim_len][file_format] + self.time_constr = Constraint(time=lambda cell: cell.point.year < 3) + + def time_time_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, self.time_constr) + + +class ManyVars: + FILE_PATH = BENCHMARK_DATA / "many_var_file.nc" + + @staticmethod + def _create_file(save_path: str) -> None: + """Is run externally - everything must be self-contained.""" + import numpy as np + + from iris import save + from iris.coords import AuxCoord + from iris.cube import Cube + + data_len = 8 + data = np.arange(data_len) + cube = Cube(data, units="unknown") + extra_vars = 80 + names = ["coord_" + str(i) for i in range(extra_vars)] + for name in names: + coord = AuxCoord(data, long_name=name, units="unknown") + cube.add_aux_coord(coord, 0) + save(cube, save_path) + + def setup_cache(self) -> None: + if not REUSE_DATA or not self.FILE_PATH.is_file(): + # See :mod:`benchmarks.generate_data` docstring for full explanation. + _ = run_function_elsewhere( + self._create_file, + str(self.FILE_PATH), + ) + + def time_many_var_load(self) -> None: + _ = load(str(self.FILE_PATH)) + + +class StructuredFF: + """ + Test structured loading of a large-ish fieldsfile. + + Structured load of the larger size should show benefit over standard load, + avoiding the cost of merging. + """ + + params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True]] + param_names = ["xyz", "structured_loading"] + + def setup_cache(self) -> dict: + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files(x, y, z, 1, False, ["FF"]) + return file_path_dict + + def setup(self, file_path_dict, xyz, structured_load): + self.file_path = file_path_dict[xyz]["FF"] + self.structured_load = structured_load + + def load(self): + """Load the whole file (in fact there is only 1 cube).""" + + def _load(): + _ = load(self.file_path) + + if self.structured_load: + with structured_um_loading(): + _load() + else: + _load() + + def time_structured_load(self, _, __, ___): + self.load() diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py new file mode 100644 index 0000000000..350a78e128 --- /dev/null +++ b/benchmarks/benchmarks/load/ugrid.py @@ -0,0 +1,130 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Mesh data loading benchmark tests. + +Where possible benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. + +""" + +from iris import load_cube as iris_load_cube +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD +from iris.experimental.ugrid import load_mesh as iris_load_mesh + +from ..generate_data.stock import create_file__xios_2d_face_half_levels + + +def synthetic_data(**kwargs): + # Ensure all uses of the synthetic data function use the common directory. + # File location is controlled by :mod:`generate_data`, hence temp_file_dir=None. + return create_file__xios_2d_face_half_levels(temp_file_dir=None, **kwargs) + + +def load_cube(*args, **kwargs): + with PARSE_UGRID_ON_LOAD.context(): + return iris_load_cube(*args, **kwargs) + + +def load_mesh(*args, **kwargs): + with PARSE_UGRID_ON_LOAD.context(): + return iris_load_mesh(*args, **kwargs) + + +class BasicLoading: + params = [1, int(2e5)] + param_names = ["number of faces"] + + def setup_common(self, **kwargs): + self.data_path = synthetic_data(**kwargs) + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=args[0]) + + def time_load_file(self, *args): + _ = load_cube(str(self.data_path)) + + def time_load_mesh(self, *args): + _ = load_mesh(str(self.data_path)) + + +class BasicLoadingTime(BasicLoading): + """Same as BasicLoading, but scaling over a time series - an unlimited dimension.""" + + # NOTE iris#4834 - careful how big the time dimension is (time dimension + # is UNLIMITED). + + param_names = ["number of time steps"] + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=1, n_times=args[0]) + + +class DataRealisation: + # Prevent repeat runs between setup() runs - data won't be lazy after 1st. + number = 1 + # Compensate for reduced certainty by increasing number of repeats. + repeat = (10, 10, 10.0) + # Prevent ASV running its warmup, which ignores `number` and would + # therefore get a false idea of typical run time since the data would stop + # being lazy. 
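+    # (Assumed ASV semantics, for context: ``number = 1`` means each timed
+    # sample is a single call with a fresh ``setup``, so the data is still
+    # lazy; ``repeat = (min, max, max_time)`` restores statistical
+    # confidence; and ``warmup_time = 0.0`` below disables the calibration
+    # phase that would otherwise realise the data early.)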
+ warmup_time = 0.0 + timeout = 300.0 + + params = [1, int(2e5)] + param_names = ["number of faces"] + + def setup_common(self, **kwargs): + data_path = synthetic_data(**kwargs) + self.cube = load_cube(str(data_path)) + + def setup(self, *args): + self.setup_common(dataset_name="Realisation", n_faces=args[0]) + + def time_realise_data(self, *args): + assert self.cube.has_lazy_data() + _ = self.cube.data[0] + + +class DataRealisationTime(DataRealisation): + """Same as DataRealisation, but scaling over a time series - an unlimited dimension.""" + + param_names = ["number of time steps"] + + def setup(self, *args): + self.setup_common( + dataset_name="Realisation", n_faces=1, n_times=args[0] + ) + + +class Callback: + params = [1, int(2e5)] + param_names = ["number of faces"] + + def setup_common(self, **kwargs): + def callback(cube, field, filename): + return cube[::2] + + self.data_path = synthetic_data(**kwargs) + self.callback = callback + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=args[0]) + + def time_load_file_callback(self, *args): + _ = load_cube(str(self.data_path), callback=self.callback) + + +class CallbackTime(Callback): + """Same as Callback, but scaling over a time series - an unlimited dimension.""" + + param_names = ["number of time steps"] + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=1, n_times=args[0]) diff --git a/benchmarks/benchmarks/mixin.py b/benchmarks/benchmarks/mixin.py index e78b150438..bec5518eee 100644 --- a/benchmarks/benchmarks/mixin.py +++ b/benchmarks/benchmarks/mixin.py @@ -10,10 +10,11 @@ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords from iris.common.metadata import AncillaryVariableMetadata +from . import ARTIFICIAL_DIM_SIZE + LONG_NAME = "air temperature" STANDARD_NAME = "air_temperature" VAR_NAME = "air_temp" diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 45905abd2f..75195c86e9 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -10,9 +10,10 @@ import matplotlib import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords, cube, plot +from . import ARTIFICIAL_DIM_SIZE + matplotlib.use("agg") @@ -22,7 +23,7 @@ def setup(self): # Should generate 10 distinct contours, regardless of dim size. 
dim_size = int(ARTIFICIAL_DIM_SIZE / 5) repeat_number = int(dim_size / 10) - repeat_range = range(int((dim_size ** 2) / repeat_number)) + repeat_range = range(int((dim_size**2) / repeat_number)) data = np.repeat(repeat_range, repeat_number) data = data.reshape((dim_size,) * 2) diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 6db33aa192..c315119c11 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -25,16 +25,31 @@ def setup(self) -> None: ) self.cube = iris.load_cube(cube_file_path) + # Prepare a tougher cube and chunk it + chunked_cube_file_path = tests.get_data_path( + ["NetCDF", "regrid", "regrid_xyt.nc"] + ) + self.chunked_cube = iris.load_cube(chunked_cube_file_path) + + # Chunked data makes the regridder run repeatedly + self.cube.data = self.cube.lazy_data().rechunk((1, -1, -1)) + template_file_path = tests.get_data_path( ["NetCDF", "regrid", "regrid_template_global_latlon.nc"] ) self.template_cube = iris.load_cube(template_file_path) - # Chunked data makes the regridder run repeatedly - self.cube.data = self.cube.lazy_data().rechunk((1, -1, -1)) + # Prepare a regridding scheme + self.scheme_area_w = AreaWeighted() def time_regrid_area_w(self) -> None: # Regrid the cube onto the template. - out = self.cube.regrid(self.template_cube, AreaWeighted()) + out = self.cube.regrid(self.template_cube, self.scheme_area_w) # Realise the data out.data + + def time_regrid_area_w_new_grid(self) -> None: + # Regrid the chunked cube + out = self.chunked_cube.regrid(self.template_cube, self.scheme_area_w) + # Realise data + out.data diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py new file mode 100644 index 0000000000..3551c72528 --- /dev/null +++ b/benchmarks/benchmarks/save.py @@ -0,0 +1,54 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File saving benchmarks. + +Where possible benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. + +""" +from iris import save +from iris.experimental.ugrid import save_mesh + +from . 
import TrackAddedMemoryAllocation +from .generate_data.ugrid import make_cube_like_2d_cubesphere + + +class NetcdfSave: + params = [[1, 600], [False, True]] + param_names = ["cubesphere-N", "is_unstructured"] + + def setup(self, n_cubesphere, is_unstructured): + self.cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=is_unstructured + ) + + def _save_data(self, cube, do_copy=True): + if do_copy: + # Copy the cube, to avoid distorting the results by changing it, + # because we know that older Iris code realises lazy coords. + cube = cube.copy() + save(cube, "tmp.nc") + + def _save_mesh(self, cube): + # In this case, we are happy that the mesh is *not* modified + save_mesh(cube.mesh, "mesh.nc") + + def time_netcdf_save_cube(self, n_cubesphere, is_unstructured): + self._save_data(self.cube) + + def time_netcdf_save_mesh(self, n_cubesphere, is_unstructured): + if is_unstructured: + self._save_mesh(self.cube) + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_netcdf_save(self, n_cubesphere, is_unstructured): + # Don't need to copy the cube here since track_ benchmarks don't + # do repeats between self.setup() calls. + self._save_data(self.cube, do_copy=False) diff --git a/benchmarks/benchmarks/sperf/__init__.py b/benchmarks/benchmarks/sperf/__init__.py new file mode 100644 index 0000000000..eccad56f6f --- /dev/null +++ b/benchmarks/benchmarks/sperf/__init__.py @@ -0,0 +1,43 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. + +SPerf = assessing performance against a series of increasingly large LFRic +datasets. +""" +from iris import load_cube + +# TODO: remove uses of PARSE_UGRID_ON_LOAD once UGRID parsing is core behaviour. +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + +from ..generate_data.ugrid import make_cubesphere_testfile + + +class FileMixin: + """For use in any benchmark classes that work on a file.""" + + # Allows time for large file generation. + timeout = 3600.0 + # Largest file with these params: ~90GB. + # Total disk space: ~410GB. + params = [ + [12, 384, 640, 960, 1280, 1668], + [1, 36, 72], + [1, 3, 10], + ] + param_names = ["cubesphere_C", "N levels", "N time steps"] + # cubesphere_C: notation refers to faces per panel edge. + # e.g. C1 is 6 faces, 8 nodes + + def setup(self, c_size, n_levels, n_times): + self.file_path = make_cubesphere_testfile( + c_size=c_size, n_levels=n_levels, n_times=n_times + ) + + def load_cube(self): + with PARSE_UGRID_ON_LOAD.context(): + return load_cube(str(self.file_path)) diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py new file mode 100644 index 0000000000..d3d128c7d8 --- /dev/null +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -0,0 +1,257 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +import os.path + +from dask import array as da +import numpy as np + +from iris import load, load_cube, save +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD +from iris.experimental.ugrid.utils import recombine_submeshes + +from .. 
import TrackAddedMemoryAllocation, on_demand_benchmark +from ..generate_data.ugrid import BENCHMARK_DATA, make_cube_like_2d_cubesphere + + +class Mixin: + # Characterise time taken + memory-allocated, for various stages of combine + # operations on cubesphere-like test data. + timeout = 300.0 + params = [100, 200, 300, 500, 1000, 1668] + param_names = ["cubesphere_C"] + # Fix result units for the tracking benchmarks. + unit = "Mb" + temp_save_path = BENCHMARK_DATA / "tmp.nc" + + def _parametrised_cache_filename(self, n_cubesphere, content_name): + return BENCHMARK_DATA / f"cube_C{n_cubesphere}_{content_name}.nc" + + def _make_region_cubes(self, full_mesh_cube): + """Make a fixed number of region cubes from a full meshcube.""" + # Divide the cube into regions. + n_faces = full_mesh_cube.shape[-1] + # Start with a simple list of face indices + # first extend to multiple of 5 + n_faces_5s = 5 * ((n_faces + 1) // 5) + i_faces = np.arange(n_faces_5s, dtype=int) + # reshape (5N,) to (N, 5) + i_faces = i_faces.reshape((n_faces_5s // 5, 5)) + # reorder [2, 3, 4, 0, 1] within each block of 5 + i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1) + # flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...] + i_faces = i_faces.flatten() + # reduce back to original length, wrap any overflows into valid range + i_faces = i_faces[:n_faces] % n_faces + + # Divide into regions -- always slightly uneven, since 7 doesn't divide evenly + n_regions = 7 + n_facesperregion = n_faces // n_regions + i_face_regions = (i_faces // n_facesperregion) % n_regions + region_inds = [ + np.where(i_face_regions == i_region)[0] + for i_region in range(n_regions) + ] + # NOTE: this produces 7 regions, with near-adjacent value ranges but + # with some points "moved" to an adjacent region. + # Also, region-0 is bigger (because of not dividing by 7). + + # Finally, make region cubes with these indices. + region_cubes = [full_mesh_cube[..., inds] for inds in region_inds] + return region_cubes + + def setup_cache(self): + """Cache all the necessary source data on disk.""" + + # Control dask, to minimise memory usage + allow largest data. + self.fix_dask_settings() + + for n_cubesphere in self.params: + # Do for each parameter, since "setup_cache" is NOT parametrised + mesh_cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=True + ) + # Save to files which include the parameter in the names. + save( + mesh_cube, + self._parametrised_cache_filename(n_cubesphere, "meshcube"), + ) + region_cubes = self._make_region_cubes(mesh_cube) + save( + region_cubes, + self._parametrised_cache_filename(n_cubesphere, "regioncubes"), + ) + + def setup( + self, n_cubesphere, imaginary_data=True, create_result_cube=True + ): + """ + The combine-tests "standard" setup operation. + + Load the source cubes (full-mesh + region) from disk. + These are specific to the cubesize parameter. + The data is cached on disk rather than calculated, to avoid any + pre-loading of the process memory allocation. + + If 'imaginary_data' is set (default), the region cubes' data is replaced + with lazy data in the form of a da.zeros(). Otherwise, the region data + is lazy data from the files. + + If 'create_result_cube' is set, create "self.recombined_cube" containing + the (still lazy) result. + + NOTE: various test classes override + extend this.
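+        For illustration, the 'imaginary' replacement performed below is
+        equivalent to::
+
+            lazy = cube.lazy_data()
+            cube.data = da.zeros(
+                lazy.shape, dtype=lazy.dtype, chunks=lazy.chunksize
+            )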
+ + """ + + # Load source cubes (full-mesh and regions) + with PARSE_UGRID_ON_LOAD.context(): + self.full_mesh_cube = load_cube( + self._parametrised_cache_filename(n_cubesphere, "meshcube") + ) + self.region_cubes = load( + self._parametrised_cache_filename(n_cubesphere, "regioncubes") + ) + + # Remove all var-names from loaded cubes, which can otherwise cause + # problems. Also implement 'imaginary' data. + for cube in self.region_cubes + [self.full_mesh_cube]: + cube.var_name = None + for coord in cube.coords(): + coord.var_name = None + if imaginary_data: + # Replace cube data (lazy file data) with 'imaginary' data. + # This has the same lazy-array attributes, but is allocated by + # creating chunks on demand instead of loading from file. + data = cube.lazy_data() + data = da.zeros( + data.shape, dtype=data.dtype, chunks=data.chunksize + ) + cube.data = data + + if create_result_cube: + self.recombined_cube = self.recombine() + + # Fix dask usage mode for all the subsequent performance tests. + self.fix_dask_settings() + + def teardown(self, _): + self.temp_save_path.unlink(missing_ok=True) + + def fix_dask_settings(self): + """ + Fix "standard" dask behaviour for time+space testing. + + Currently this is single-threaded mode, with known chunksize, + which is optimised for space saving so we can test largest data. + + """ + + import dask.config as dcfg + + # Use single-threaded, to avoid process-switching costs and minimise memory usage. + # N.B. generally may be slower, but should use less memory. + dcfg.set(scheduler="single-threaded") + # Configure iris._lazy_data.as_lazy_data to aim for 128 MiB chunks. + dcfg.set({"array.chunk-size": "128Mib"}) + + def recombine(self): + # A handy general shorthand for the main "combine" operation. + result = recombine_submeshes( + self.full_mesh_cube, + self.region_cubes, + index_coord_name="i_mesh_face", + ) + return result + + def save_recombined_cube(self): + save(self.recombined_cube, self.temp_save_path) + + +@on_demand_benchmark +class CreateCube(Mixin): + """ + Time+memory costs of creating a combined-regions cube. + + The result is lazy, and we don't do the actual calculation. + + """ + + def setup( + self, n_cubesphere, imaginary_data=True, create_result_cube=False + ): + # In this case only, do *not* create the result cube. + # That is the operation we want to test. + super().setup(n_cubesphere, imaginary_data, create_result_cube) + + def time_create_combined_cube(self, n_cubesphere): + self.recombine() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_create_combined_cube(self, n_cubesphere): + self.recombine() + + +@on_demand_benchmark +class ComputeRealData(Mixin): + """ + Time+memory costs of computing combined-regions data. + """ + + def time_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + +@on_demand_benchmark +class SaveData(Mixin): + """ + Test saving *only*, having replaced the input cube data with 'imaginary' + array data, so that input data is not loaded from disk during the save + operation. + + """ + + def time_save(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file.
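+        # (The recombined cube is still lazy at this point, so the save
+        # computes each chunk - cheap ``da.zeros`` data in this class - and
+        # streams it through the netCDF writer.)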
+ self.save_recombined_cube() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save(self, n_cubesphere): + self.save_recombined_cube() + + def track_filesize_saved(self, n_cubesphere): + self.save_recombined_cube() + return self.temp_save_path.stat().st_size * 1.0e-6 + + +@on_demand_benchmark +class FileStreamedCalc(Mixin): + """ + Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region + cubes on disk. + """ + + def setup( + self, n_cubesphere, imaginary_data=False, create_result_cube=True + ): + # In this case only, do *not* replace the loaded regions data with + # 'imaginary' data, as we want to test file-to-file calculation+save. + super().setup(n_cubesphere, imaginary_data, create_result_cube) + + def time_stream_file2file(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + self.save_recombined_cube() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_stream_file2file(self, n_cubesphere): + self.save_recombined_cube() diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py new file mode 100644 index 0000000000..85c73ab92b --- /dev/null +++ b/benchmarks/benchmarks/sperf/equality.py @@ -0,0 +1,36 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import FileMixin +from .. import on_demand_benchmark + + +@on_demand_benchmark +class CubeEquality(FileMixin): + """ + Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s + with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. + + Uses :class:`FileMixin` as the realistic case will be comparing + :class:`~iris.cube.Cube`\\ s that have been loaded from file. + + """ + + # Cut down parent parameters. + params = [FileMixin.params[0]] + + def setup(self, c_size, n_levels=1, n_times=1): + super().setup(c_size, n_levels, n_times) + self.cube = self.load_cube() + self.other_cube = self.load_cube() + + def peakmem_eq(self, n_cube): + _ = self.cube == self.other_cube + + def time_eq(self, n_cube): + _ = self.cube == self.other_cube diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py new file mode 100644 index 0000000000..6a60355976 --- /dev/null +++ b/benchmarks/benchmarks/sperf/load.py @@ -0,0 +1,29 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import FileMixin +from .. import on_demand_benchmark + + +@on_demand_benchmark +class Load(FileMixin): + def time_load_cube(self, _, __, ___): + _ = self.load_cube() + + +@on_demand_benchmark +class Realise(FileMixin): + def setup(self, c_size, n_levels, n_times): + super().setup(c_size, n_levels, n_times) + self.loaded_cube = self.load_cube() + + def time_realise_cube(self, _, __, ___): + # Don't touch loaded_cube.data - permanent realisation plays badly with + # ASV's re-run strategy.
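+        # ``core_data().compute()`` is understood to realise a copy without
+        # assigning it back, so the cube itself stays lazy for later repeats.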
+ assert self.loaded_cube.has_lazy_data() + self.loaded_cube.core_data().compute() diff --git a/benchmarks/benchmarks/sperf/save.py b/benchmarks/benchmarks/sperf/save.py new file mode 100644 index 0000000000..dd33924c6c --- /dev/null +++ b/benchmarks/benchmarks/sperf/save.py @@ -0,0 +1,56 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +import os.path + +from iris import save +from iris.experimental.ugrid import save_mesh + +from .. import TrackAddedMemoryAllocation, on_demand_benchmark +from ..generate_data.ugrid import make_cube_like_2d_cubesphere + + +@on_demand_benchmark +class NetcdfSave: + """ + Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. + + """ + + params = [[1, 100, 200, 300, 500, 1000, 1668], [False, True]] + param_names = ["cubesphere_C", "is_unstructured"] + # Fix result units for the tracking benchmarks. + unit = "Mb" + + def setup(self, n_cubesphere, is_unstructured): + self.cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=is_unstructured + ) + + def _save_cube(self, cube): + save(cube, "tmp.nc") + + def _save_mesh(self, cube): + save_mesh(cube.mesh, "mesh.nc") + + def time_save_cube(self, n_cubesphere, is_unstructured): + self._save_cube(self.cube) + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save_cube(self, n_cubesphere, is_unstructured): + self._save_cube(self.cube) + + def time_save_mesh(self, n_cubesphere, is_unstructured): + if is_unstructured: + self._save_mesh(self.cube) + + # The filesizes make a good reference point for the 'addedmem' memory + # usage results. + def track_filesize_save_cube(self, n_cubesphere, is_unstructured): + self._save_cube(self.cube) + return os.path.getsize("tmp.nc") * 1.0e-6 diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py new file mode 100644 index 0000000000..5c1d10d218 --- /dev/null +++ b/benchmarks/benchmarks/trajectory.py @@ -0,0 +1,48 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Trajectory benchmark tests. + +""" + +# import iris tests first so that some things can be initialised before +# importing anything else +from iris import tests # isort:skip + +import numpy as np + +import iris +from iris.analysis.trajectory import interpolate + + +class TrajectoryInterpolation: + def setup(self) -> None: + # Prepare a cube and a set of sample points. + + cube_file_path = tests.get_data_path( + ["NetCDF", "regrid", "regrid_xyt.nc"] + ) + self.cube = iris.load_cube(cube_file_path) + + trajectory = np.array( + [np.array((-50 + i, -50 + i)) for i in range(100)] + ) + self.sample_points = [ + ("longitude", trajectory[:, 0]), + ("latitude", trajectory[:, 1]), + ] + + def time_trajectory_linear(self) -> None: + # Interpolate the cube along the sample trajectory. + out_cube = interpolate(self.cube, self.sample_points, method="linear") + # Realise the data + out_cube.data + + def time_trajectory_nearest(self) -> None: + # Interpolate the cube along the sample trajectory.
+ out_cube = interpolate(self.cube, self.sample_points, method="nearest") + # Realise the data + out_cube.data diff --git a/benchmarks/nox_asv_plugin.py b/benchmarks/nox_asv_plugin.py deleted file mode 100644 index 6c9ce14272..0000000000 --- a/benchmarks/nox_asv_plugin.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -ASV plug-in providing an alternative ``Environment`` subclass, which uses Nox -for environment management. - -""" -from importlib.util import find_spec -from pathlib import Path -from shutil import copy2, copytree -from tempfile import TemporaryDirectory - -from asv import util as asv_util -from asv.config import Config -from asv.console import log -from asv.environment import get_env_name -from asv.plugins.conda import Conda, _find_conda -from asv.repo import Repo, get_repo - - -class NoxConda(Conda): - """ - Manage a Conda environment using Nox, updating environment at each commit. - - Defers environment management to the project's noxfile, which must be able - to create/update the benchmarking environment using ``nox --install-only``, - with the ``--session`` specified in ``asv.conf.json.nox_session_name``. - - Notes - ----- - If not all benchmarked commits support this use of Nox: the plugin will - need to be modified to prep the environment in other ways. - - """ - - tool_name = "nox-conda" - - @classmethod - def matches(cls, python: str) -> bool: - """Used by ASV to work out if this type of environment can be used.""" - result = find_spec("nox") is not None - if result: - result = super().matches(python) - - if result: - message = ( - f"NOTE: ASV env match check incomplete. Not possible to know " - f"if selected Nox session (asv.conf.json.nox_session_name) is " - f"compatible with ``--python={python}`` until project is " - f"checked out." - ) - log.warning(message) - - return result - - def __init__(self, conf: Config, python: str, requirements: dict) -> None: - """ - Parameters - ---------- - conf: Config instance - - python : str - Version of Python. Must be of the form "MAJOR.MINOR". - - requirements : dict - Dictionary mapping a PyPI package name to a version - identifier string. - - """ - from nox.sessions import _normalize_path - - # Need to checkout the project BEFORE the benchmark run - to access a noxfile. - self.project_temp_checkout = TemporaryDirectory( - prefix="nox_asv_checkout_" - ) - repo = get_repo(conf) - repo.checkout(self.project_temp_checkout.name, conf.nox_setup_commit) - self.noxfile_rel_path = conf.noxfile_rel_path - self.setup_noxfile = ( - Path(self.project_temp_checkout.name) / self.noxfile_rel_path - ) - self.nox_session_name = conf.nox_session_name - - # Some duplication of parent code - need these attributes BEFORE - # running inherited code. - self._python = python - self._requirements = requirements - self._env_dir = conf.env_dir - - # Prepare the actual environment path, to override self._path. - nox_envdir = str(Path(self._env_dir).absolute() / self.hashname) - nox_friendly_name = self._get_nox_session_name(python) - self._nox_path = Path(_normalize_path(nox_envdir, nox_friendly_name)) - - # For storing any extra conda requirements from asv.conf.json. 
- self._extra_reqs_path = self._nox_path / "asv-extra-reqs.yaml" - - super().__init__(conf, python, requirements) - - @property - def _path(self) -> str: - """ - Using a property to override getting and setting in parent classes - - unable to modify parent classes as this is a plugin. - - """ - return str(self._nox_path) - - @_path.setter - def _path(self, value) -> None: - """Enforce overriding of this variable by disabling modification.""" - pass - - @property - def name(self) -> str: - """Overridden to prevent inclusion of user input requirements.""" - return get_env_name(self.tool_name, self._python, {}) - - def _get_nox_session_name(self, python: str) -> str: - nox_cmd_substring = ( - f"--noxfile={self.setup_noxfile} " - f"--session={self.nox_session_name} " - f"--python={python}" - ) - - list_output = asv_util.check_output( - ["nox", "--list", *nox_cmd_substring.split(" ")], - display_error=False, - dots=False, - ) - list_output = list_output.split("\n") - list_matches = list(filter(lambda s: s.startswith("*"), list_output)) - matches_count = len(list_matches) - - if matches_count == 0: - message = f"No Nox sessions found for: {nox_cmd_substring} ." - log.error(message) - raise RuntimeError(message) - elif matches_count > 1: - message = ( - f"Ambiguous - >1 Nox session found for: {nox_cmd_substring} ." - ) - log.error(message) - raise RuntimeError(message) - else: - line = list_matches[0] - session_name = line.split(" ")[1] - assert isinstance(session_name, str) - return session_name - - def _nox_prep_env(self, setup: bool = False) -> None: - message = f"Running Nox environment update for: {self.name}" - log.info(message) - - build_root_path = Path(self._build_root) - env_path = Path(self._path) - - def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: - """For copying between self._path and a temporary cache.""" - asv_files = list(src_parent.glob("asv*")) - # build_root_path.name usually == "project" . - asv_files += [src_parent / build_root_path.name] - for src_path in asv_files: - dst_path = dst_parent / src_path.name - if not dst_path.exists(): - # Only cache-ing in case Nox has rebuilt the env @ - # self._path. If the dst_path already exists: rebuilding - # hasn't happened. Also a non-issue when copying in the - # reverse direction because the cache dir is temporary. - if src_path.is_dir(): - func = copytree - else: - func = copy2 - func(src_path, dst_path) - - with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache: - asv_cache_path = Path(asv_cache) - if setup: - noxfile = self.setup_noxfile - else: - # Cache all of ASV's files as Nox may remove and re-build the environment. - copy_asv_files(env_path, asv_cache_path) - # Get location of noxfile in cache. - noxfile_original = ( - build_root_path / self._repo_subdir / self.noxfile_rel_path - ) - noxfile_subpath = noxfile_original.relative_to( - build_root_path.parent - ) - noxfile = asv_cache_path / noxfile_subpath - - nox_cmd = [ - "nox", - f"--noxfile={noxfile}", - # Place the env in the ASV env directory, instead of the default. - f"--envdir={env_path.parent}", - f"--session={self.nox_session_name}", - f"--python={self._python}", - "--install-only", - "--no-error-on-external-run", - "--verbose", - ] - - _ = asv_util.check_output(nox_cmd) - if not env_path.is_dir(): - message = f"Expected Nox environment not found: {env_path}" - log.error(message) - raise RuntimeError(message) - - if not setup: - # Restore ASV's files from the cache (if necessary). 
- copy_asv_files(asv_cache_path, env_path) - - def _setup(self) -> None: - """Used for initial environment creation - mimics parent method where possible.""" - try: - self.conda = _find_conda() - except IOError as e: - raise asv_util.UserError(str(e)) - if find_spec("nox") is None: - raise asv_util.UserError("Module not found: nox") - - message = f"Creating Nox-Conda environment for {self.name} ." - log.info(message) - - try: - self._nox_prep_env(setup=True) - finally: - # No longer need the setup checkout now that the environment has been built. - self.project_temp_checkout.cleanup() - - conda_args, pip_args = self._get_requirements(self.conda) - if conda_args or pip_args: - message = ( - "Ignoring user input package requirements. Benchmark " - "environment management is exclusively performed by Nox." - ) - log.warning(message) - - def checkout_project(self, repo: Repo, commit_hash: str) -> None: - """Check out the working tree of the project at given commit hash.""" - super().checkout_project(repo, commit_hash) - self._nox_prep_env() - log.info( - f"Environment {self.name} updated to spec at {commit_hash[:8]}" - ) diff --git a/docs/Makefile b/docs/Makefile index 44c89206d2..f4c8d0b7f4 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -55,8 +55,3 @@ linkcheck: echo "Running linkcheck in $$i..."; \ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) linkcheck); done -gallerytest: - @echo - @echo "Running \"gallery\" tests..." - @echo - python -m unittest discover -v -t . diff --git a/docs/gallery_code/README.rst b/docs/gallery_code/README.rst index 720fd1e6f6..85bf0552b4 100644 --- a/docs/gallery_code/README.rst +++ b/docs/gallery_code/README.rst @@ -1,3 +1,5 @@ +.. _gallery_index: + Gallery ======= diff --git a/docs/gallery_code/general/README.rst b/docs/gallery_code/general/README.rst index c846755f1e..3a48e7cd8e 100644 --- a/docs/gallery_code/general/README.rst +++ b/docs/gallery_code/general/README.rst @@ -1,2 +1,3 @@ General ------- + diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 025f395789..4b817aea66 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -57,7 +57,7 @@ import datetime -from cf_units import CALENDAR_GREGORIAN, Unit +from cf_units import CALENDAR_STANDARD, Unit import matplotlib.pyplot as plt import numpy as np @@ -225,7 +225,7 @@ def NAME_to_cube(filenames, callback): # define the time unit and use it to serialise the datetime for the # time coordinate - time_unit = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN) + time_unit = Unit("hours since epoch", calendar=CALENDAR_STANDARD) time_coord = icoords.AuxCoord( time_unit.date2num(field_headings["time"]), standard_name="time", diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py new file mode 100644 index 0000000000..08a9578e63 --- /dev/null +++ b/docs/gallery_code/general/plot_zonal_means.py @@ -0,0 +1,89 @@ +""" +Zonal Mean Diagram of Air Temperature +===================================== +This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. 
+""" + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt +from mpl_toolkits.axes_grid1 import make_axes_locatable +import numpy as np + +import iris +from iris.analysis import MEAN +import iris.plot as iplt +import iris.quickplot as qplt + + +def main(): + + # Loads air_temp.pp and "collapses" longitude into a single, average value. + fname = iris.sample_data_path("air_temp.pp") + temperature = iris.load_cube(fname) + collapsed_temp = temperature.collapsed("longitude", MEAN) + + # Set y-axes with -90 and 90 limits and steps of 15 per tick. + start, stop, step = -90, 90, 15 + yticks = np.arange(start, stop + step, step) + ylim = [start, stop] + + # Plot "temperature" on a cartographic plot and set the ticks and titles + # on the axes. + fig = plt.figure(figsize=[12, 4]) + + ax1 = fig.add_subplot(111, projection=ccrs.PlateCarree()) + im = iplt.contourf(temperature, cmap="RdYlBu_r") + ax1.coastlines() + ax1.gridlines() + ax1.set_xticks([-180, -90, 0, 90, 180]) + ax1.set_yticks(yticks) + ax1.set_title("Air Temperature") + ax1.set_ylabel(f"Latitude / {temperature.coord('latitude').units}") + ax1.set_xlabel(f"Longitude / {temperature.coord('longitude').units}") + ax1.set_ylim(*ylim) + + # Create a Matplotlib AxesDivider object to allow alignment of other + # Axes objects. + divider = make_axes_locatable(ax1) + + # Gives the air temperature bar size, colour and a title. + ax2 = divider.new_vertical( + size="5%", pad=0.5, axes_class=plt.Axes, pack_start=True + ) # creates 2nd axis + fig.add_axes(ax2) + cbar = plt.colorbar( + im, cax=ax2, orientation="horizontal" + ) # puts colour bar on second axis + cbar.ax.set_xlabel(f"{temperature.units}") # labels colour bar + + # Plot "collapsed_temp" on the mean graph and set the ticks and titles + # on the axes. + ax3 = divider.new_horizontal( + size="30%", pad=0.4, axes_class=plt.Axes + ) # create 3rd axis + fig.add_axes(ax3) + qplt.plot( + collapsed_temp, collapsed_temp.coord("latitude") + ) # plots temperature collapsed over longitude against latitude + ax3.axhline(0, color="k", linewidth=0.5) + + # Creates zonal mean details + ax3.set_title("Zonal Mean") + ax3.yaxis.set_label_position("right") + ax3.yaxis.tick_right() + ax3.set_yticks(yticks) + ax3.grid() + + # Round each tick for the third ax to the nearest 20 (ready for use). 
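+    # (How the rounding works: ``x - x % -20`` rounds up to the next
+    # multiple of 20 and ``x - x % 20`` rounds down, because Python's ``%``
+    # takes the sign of the divisor, e.g. 37 % -20 == -3, so 37 - -3 == 40.)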
+ data_max = collapsed_temp.data.max() + x_max = data_max - data_max % -20 + data_min = collapsed_temp.data.min() + x_min = data_min - data_min % 20 + ax3.set_xlim(x_min, x_max) + ax3.set_ylim(*ylim) + + plt.show() + + +if __name__ == "__main__": + main() diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index c3c056eb4a..b09040c64e 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -30,7 +30,7 @@ def main(): # To illustrate the full range of barbs, scale the wind speed up to pretend # that a storm is passing over - magnitude = (uwind ** 2 + vwind ** 2) ** 0.5 + magnitude = (uwind**2 + vwind**2) ** 0.5 magnitude.convert_units("knot") max_speed = magnitude.collapsed( ("latitude", "longitude"), iris.analysis.MAX @@ -41,7 +41,7 @@ def main(): vwind = vwind / max_speed * max_desired # Create a cube containing the wind speed - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") windspeed.convert_units("knot") diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index fd03f54205..40d9d0da00 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -27,7 +27,7 @@ def main(): vwind = iris.load_cube(infile, "y_wind") # Create a cube containing the wind speed. - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") # Plot the wind speed as a contour plot. diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index 4bfee5ac8e..b19f37e1f5 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -13,7 +13,7 @@ import iris import iris.plot as iplt import iris.quickplot as qplt -from iris.util import promote_aux_coord_to_dim_coord +from iris.util import equalise_attributes, promote_aux_coord_to_dim_coord def main(): @@ -21,16 +21,15 @@ def main(): fname = iris.sample_data_path("NEMO/nemo_1m_*.nc") cubes = iris.load(fname) - # Some attributes are unique to each file and must be blanked - # to allow concatenation. - differing_attrs = ["file_name", "name", "timeStamp", "TimeStamp"] - for cube in cubes: - for attribute in differing_attrs: - cube.attributes[attribute] = "" - - # The cubes still cannot be concatenated because their time dimension is - # time_counter rather than time. time needs to be promoted to allow + # Some attributes are unique to each file and must be removed to allow # concatenation. + equalise_attributes(cubes) + + # The cubes still cannot be concatenated because their dimension coordinate + # is "time_counter", which has the same value for each cube. concatenate + # needs distinct values in order to create a new DimCoord for the output + # cube. Here, each cube has a "time" auxiliary coordinate, and these do + # have distinct values, so we can promote them to allow concatenation. for cube in cubes: promote_aux_coord_to_dim_coord(cube, "time") diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py new file mode 100644 index 0000000000..a218b305a2 --- /dev/null +++ b/docs/gallery_tests/conftest.py @@ -0,0 +1,67 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +"""Pytest fixtures for the gallery tests.""" + +import pathlib + +import matplotlib.pyplot as plt +import pytest + +import iris + +CURRENT_DIR = pathlib.Path(__file__).resolve() +GALLERY_DIR = CURRENT_DIR.parents[1] / "gallery_code" + + +@pytest.fixture +def image_setup_teardown(): + """ + Setup and teardown fixture. + + Ensures all figures are closed before and after test to prevent one test + polluting another if it fails with a figure unclosed. + + """ + plt.close("all") + yield + plt.close("all") + + +@pytest.fixture +def import_patches(monkeypatch): + """ + Replace plt.show() with a function that does nothing, also add all the + gallery examples to sys.path. + + """ + + def no_show(): + pass + + monkeypatch.setattr(plt, "show", no_show) + + for example_dir in GALLERY_DIR.iterdir(): + if example_dir.is_dir(): + monkeypatch.syspath_prepend(example_dir) + + yield + + +@pytest.fixture +def iris_future_defaults(): + """ + Create a fixture which resets all the iris.FUTURE settings to the defaults, + as otherwise changes made in one test can affect subsequent ones. + + """ + # Run with all default settings in iris.FUTURE. + default_future_kwargs = iris.Future().__dict__.copy() + for dead_option in iris.Future.deprecated_options: + # Avoid a warning when setting these ! + del default_future_kwargs[dead_option] + with iris.FUTURE.context(**default_future_kwargs): + yield diff --git a/docs/gallery_tests/gallerytest_util.py b/docs/gallery_tests/gallerytest_util.py deleted file mode 100644 index eb2736f194..0000000000 --- a/docs/gallery_tests/gallerytest_util.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -""" -Provides context managers which are fundamental to the ability -to run the gallery tests. - -""" - -import contextlib -import os.path -import sys -import warnings - -import matplotlib.pyplot as plt - -import iris -from iris._deprecation import IrisDeprecation -import iris.plot as iplt -import iris.quickplot as qplt - -GALLERY_DIRECTORY = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "gallery_code" -) -GALLERY_DIRECTORIES = [ - os.path.join(GALLERY_DIRECTORY, the_dir) - for the_dir in os.listdir(GALLERY_DIRECTORY) -] - - -@contextlib.contextmanager -def add_gallery_to_path(): - """ - Creates a context manager which can be used to add the iris gallery - to the PYTHONPATH. The gallery entries are only importable throughout the lifetime - of this context manager. - - """ - orig_sys_path = sys.path - sys.path = sys.path[:] - sys.path += GALLERY_DIRECTORIES - yield - sys.path = orig_sys_path - - -@contextlib.contextmanager -def show_replaced_by_check_graphic(test_case): - """ - Creates a context manager which can be used to replace the functionality - of matplotlib.pyplot.show with a function which calls the check_graphic - method on the given test_case (iris.tests.IrisTest.check_graphic). - - """ - - def replacement_show(): - # form a closure on test_case and tolerance - test_case.check_graphic() - - orig_show = plt.show - plt.show = iplt.show = qplt.show = replacement_show - yield - plt.show = iplt.show = qplt.show = orig_show - - -@contextlib.contextmanager -def fail_any_deprecation_warnings(): - """ - Create a context in which any deprecation warning will cause an error. 
- - The context also resets all the iris.FUTURE settings to the defaults, as - otherwise changes made in one test can affect subsequent ones. - - """ - with warnings.catch_warnings(): - # Detect and error all and any Iris deprecation warnings. - warnings.simplefilter("error", IrisDeprecation) - # Run with all default settings in iris.FUTURE. - default_future_kwargs = iris.Future().__dict__.copy() - for dead_option in iris.Future.deprecated_options: - # Avoid a warning when setting these ! - del default_future_kwargs[dead_option] - with iris.FUTURE.context(**default_future_kwargs): - yield diff --git a/docs/gallery_tests/test_gallery_examples.py b/docs/gallery_tests/test_gallery_examples.py new file mode 100644 index 0000000000..0d0793a7da --- /dev/null +++ b/docs/gallery_tests/test_gallery_examples.py @@ -0,0 +1,44 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +import importlib + +import matplotlib.pyplot as plt +import pytest + +from iris.tests import _RESULT_PATH +from iris.tests.graphics import check_graphic + +from .conftest import GALLERY_DIR + + +def gallery_examples(): + """Generator to yield all current gallery examples.""" + + for example_file in GALLERY_DIR.glob("*/plot*.py"): + yield example_file.stem + + +@pytest.mark.filterwarnings("error::iris.IrisDeprecation") +@pytest.mark.parametrize("example", gallery_examples()) +def test_plot_example( + example, + image_setup_teardown, + import_patches, + iris_future_defaults, +): + """Test that all figures from example code match KGO.""" + + module = importlib.import_module(example) + + # Run example. + module.main() + # Loop through open figures and set each to be the current figure so check_graphic + # will find it. + for fig_num in plt.get_fignums(): + plt.figure(fig_num) + image_id = f"gallery_tests.test_{example}.{fig_num - 1}" + check_graphic(image_id, _RESULT_PATH) diff --git a/docs/gallery_tests/test_plot_COP_1d.py b/docs/gallery_tests/test_plot_COP_1d.py deleted file mode 100644 index 9771e10fb1..0000000000 --- a/docs/gallery_tests/test_plot_COP_1d.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCOP1DPlot(tests.GraphicsTest): - """Test the COP_1d_plot gallery code.""" - - def test_plot_COP_1d(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_COP_1d - with show_replaced_by_check_graphic(self): - plot_COP_1d.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_COP_maps.py b/docs/gallery_tests/test_plot_COP_maps.py deleted file mode 100644 index a01e12527f..0000000000 --- a/docs/gallery_tests/test_plot_COP_maps.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCOPMaps(tests.GraphicsTest): - """Test the COP_maps gallery code.""" - - def test_plot_cop_maps(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_COP_maps - with show_replaced_by_check_graphic(self): - plot_COP_maps.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_SOI_filtering.py b/docs/gallery_tests/test_plot_SOI_filtering.py deleted file mode 100644 index 1da731122a..0000000000 --- a/docs/gallery_tests/test_plot_SOI_filtering.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestSOIFiltering(tests.GraphicsTest): - """Test the SOI_filtering gallery code.""" - - def test_plot_soi_filtering(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_SOI_filtering - with show_replaced_by_check_graphic(self): - plot_SOI_filtering.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_TEC.py b/docs/gallery_tests/test_plot_TEC.py deleted file mode 100644 index cfc1fb8eec..0000000000 --- a/docs/gallery_tests/test_plot_TEC.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestTEC(tests.GraphicsTest): - """Test the TEC gallery code.""" - - def test_plot_TEC(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_TEC - with show_replaced_by_check_graphic(self): - plot_TEC.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_anomaly_log_colouring.py b/docs/gallery_tests/test_plot_anomaly_log_colouring.py deleted file mode 100644 index 41f76cc774..0000000000 --- a/docs/gallery_tests/test_plot_anomaly_log_colouring.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestAnomalyLogColouring(tests.GraphicsTest): - """Test the anomaly colouring gallery code.""" - - def test_plot_anomaly_log_colouring(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_anomaly_log_colouring - with show_replaced_by_check_graphic(self): - plot_anomaly_log_colouring.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_atlantic_profiles.py b/docs/gallery_tests/test_plot_atlantic_profiles.py deleted file mode 100644 index fdcb5fb1d1..0000000000 --- a/docs/gallery_tests/test_plot_atlantic_profiles.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestAtlanticProfiles(tests.GraphicsTest): - """Test the atlantic_profiles gallery code.""" - - def test_plot_atlantic_profiles(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_atlantic_profiles - with show_replaced_by_check_graphic(self): - plot_atlantic_profiles.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_coriolis.py b/docs/gallery_tests/test_plot_coriolis.py deleted file mode 100644 index 2e4cea8a74..0000000000 --- a/docs/gallery_tests/test_plot_coriolis.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. - -import iris.tests as tests - -from . import gallerytest_util - -with gallerytest_util.add_gallery_to_path(): - import plot_coriolis - - -class TestCoriolisPlot(tests.GraphicsTest): - """Test the Coriolis Plot gallery code.""" - - def test_plot_coriolis(self): - with gallerytest_util.show_replaced_by_check_graphic(self): - plot_coriolis.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_cross_section.py b/docs/gallery_tests/test_plot_cross_section.py deleted file mode 100644 index b0878d10bc..0000000000 --- a/docs/gallery_tests/test_plot_cross_section.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCrossSection(tests.GraphicsTest): - """Test the cross_section gallery code.""" - - def test_plot_cross_section(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_cross_section - with show_replaced_by_check_graphic(self): - plot_cross_section.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_custom_aggregation.py b/docs/gallery_tests/test_plot_custom_aggregation.py deleted file mode 100644 index 9d0a40dd3c..0000000000 --- a/docs/gallery_tests/test_plot_custom_aggregation.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCustomAggregation(tests.GraphicsTest): - """Test the custom aggregation gallery code.""" - - def test_plot_custom_aggregation(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_custom_aggregation - with show_replaced_by_check_graphic(self): - plot_custom_aggregation.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_custom_file_loading.py b/docs/gallery_tests/test_plot_custom_file_loading.py deleted file mode 100644 index 4d0d603a22..0000000000 --- a/docs/gallery_tests/test_plot_custom_file_loading.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCustomFileLoading(tests.GraphicsTest): - """Test the custom_file_loading gallery code.""" - - def test_plot_custom_file_loading(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_custom_file_loading - with show_replaced_by_check_graphic(self): - plot_custom_file_loading.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_deriving_phenomena.py b/docs/gallery_tests/test_plot_deriving_phenomena.py deleted file mode 100644 index ef2f8cec87..0000000000 --- a/docs/gallery_tests/test_plot_deriving_phenomena.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestDerivingPhenomena(tests.GraphicsTest): - """Test the deriving_phenomena gallery code.""" - - def test_plot_deriving_phenomena(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_deriving_phenomena - with show_replaced_by_check_graphic(self): - plot_deriving_phenomena.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_global_map.py b/docs/gallery_tests/test_plot_global_map.py deleted file mode 100644 index 16f769deae..0000000000 --- a/docs/gallery_tests/test_plot_global_map.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestGlobalMap(tests.GraphicsTest): - """Test the global_map gallery code.""" - - def test_plot_global_map(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_global_map - with show_replaced_by_check_graphic(self): - plot_global_map.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_hovmoller.py b/docs/gallery_tests/test_plot_hovmoller.py deleted file mode 100644 index 29c0e72e05..0000000000 --- a/docs/gallery_tests/test_plot_hovmoller.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestGlobalMap(tests.GraphicsTest): - """Test the hovmoller gallery code.""" - - def test_plot_hovmoller(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_hovmoller - with show_replaced_by_check_graphic(self): - plot_hovmoller.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_inset.py b/docs/gallery_tests/test_plot_inset.py deleted file mode 100644 index 739e0a3224..0000000000 --- a/docs/gallery_tests/test_plot_inset.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
- -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestInsetPlot(tests.GraphicsTest): - """Test the inset plot gallery code.""" - - def test_plot_inset(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_inset - with show_replaced_by_check_graphic(self): - plot_inset.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_lagged_ensemble.py b/docs/gallery_tests/test_plot_lagged_ensemble.py deleted file mode 100644 index f0a0201613..0000000000 --- a/docs/gallery_tests/test_plot_lagged_ensemble.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestLaggedEnsemble(tests.GraphicsTest): - """Test the lagged ensemble gallery code.""" - - def test_plot_lagged_ensemble(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_lagged_ensemble - with show_replaced_by_check_graphic(self): - plot_lagged_ensemble.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_lineplot_with_legend.py b/docs/gallery_tests/test_plot_lineplot_with_legend.py deleted file mode 100644 index 5677667026..0000000000 --- a/docs/gallery_tests/test_plot_lineplot_with_legend.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestLineplotWithLegend(tests.GraphicsTest): - """Test the lineplot_with_legend gallery code.""" - - def test_plot_lineplot_with_legend(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_lineplot_with_legend - with show_replaced_by_check_graphic(self): - plot_lineplot_with_legend.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_load_nemo.py b/docs/gallery_tests/test_plot_load_nemo.py deleted file mode 100644 index f250dc46b4..0000000000 --- a/docs/gallery_tests/test_plot_load_nemo.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestLoadNemo(tests.GraphicsTest): - """Test the load_nemo gallery code.""" - - def test_plot_load_nemo(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_load_nemo - with show_replaced_by_check_graphic(self): - plot_load_nemo.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_orca_projection.py b/docs/gallery_tests/test_plot_orca_projection.py deleted file mode 100644 index c4058c996e..0000000000 --- a/docs/gallery_tests/test_plot_orca_projection.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestOrcaProjection(tests.GraphicsTest): - """Test the orca projection gallery code.""" - - def test_plot_orca_projection(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_orca_projection - with show_replaced_by_check_graphic(self): - plot_orca_projection.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_polar_stereo.py b/docs/gallery_tests/test_plot_polar_stereo.py deleted file mode 100644 index 4d32ee5830..0000000000 --- a/docs/gallery_tests/test_plot_polar_stereo.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestPolarStereo(tests.GraphicsTest): - """Test the polar_stereo gallery code.""" - - def test_plot_polar_stereo(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_polar_stereo - with show_replaced_by_check_graphic(self): - plot_polar_stereo.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_polynomial_fit.py b/docs/gallery_tests/test_plot_polynomial_fit.py deleted file mode 100644 index b522dcf43c..0000000000 --- a/docs/gallery_tests/test_plot_polynomial_fit.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestPolynomialFit(tests.GraphicsTest): - """Test the polynomial_fit gallery code.""" - - def test_plot_polynomial_fit(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_polynomial_fit - with show_replaced_by_check_graphic(self): - plot_polynomial_fit.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_projections_and_annotations.py b/docs/gallery_tests/test_plot_projections_and_annotations.py deleted file mode 100644 index 1c24202251..0000000000 --- a/docs/gallery_tests/test_plot_projections_and_annotations.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestProjectionsAndAnnotations(tests.GraphicsTest): - """Test the atlantic_profiles gallery code.""" - - def test_plot_projections_and_annotations(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_projections_and_annotations - with show_replaced_by_check_graphic(self): - plot_projections_and_annotations.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_rotated_pole_mapping.py b/docs/gallery_tests/test_plot_rotated_pole_mapping.py deleted file mode 100644 index cd9b04fc66..0000000000 --- a/docs/gallery_tests/test_plot_rotated_pole_mapping.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestRotatedPoleMapping(tests.GraphicsTest): - """Test the rotated_pole_mapping gallery code.""" - - def test_plot_rotated_pole_mapping(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_rotated_pole_mapping - with show_replaced_by_check_graphic(self): - plot_rotated_pole_mapping.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_wind_barbs.py b/docs/gallery_tests/test_plot_wind_barbs.py deleted file mode 100644 index 6003860a5e..0000000000 --- a/docs/gallery_tests/test_plot_wind_barbs.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestWindBarbs(tests.GraphicsTest): - """Test the wind_barbs example code.""" - - def test_wind_barbs(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_wind_barbs - with show_replaced_by_check_graphic(self): - plot_wind_barbs.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_wind_speed.py b/docs/gallery_tests/test_plot_wind_speed.py deleted file mode 100644 index ebaf97adbe..0000000000 --- a/docs/gallery_tests/test_plot_wind_speed.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestWindSpeed(tests.GraphicsTest): - """Test the wind_speed gallery code.""" - - def test_plot_wind_speed(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_wind_speed - with show_replaced_by_check_graphic(self): - plot_wind_speed.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/src/_static/Iris7_1_trim_100.png b/docs/src/_static/Iris7_1_trim_100.png deleted file mode 100644 index 2f6f80eff9..0000000000 Binary files a/docs/src/_static/Iris7_1_trim_100.png and /dev/null differ diff --git a/docs/src/_static/Iris7_1_trim_full.png b/docs/src/_static/Iris7_1_trim_full.png deleted file mode 100644 index c381aa3a89..0000000000 Binary files a/docs/src/_static/Iris7_1_trim_full.png and /dev/null differ diff --git a/docs/src/_static/README.md b/docs/src/_static/README.md new file mode 100644 index 0000000000..b9f2877a30 --- /dev/null +++ b/docs/src/_static/README.md @@ -0,0 +1,31 @@ +# Iris logos + +[![iris-logo-title.svg](iris-logo-title.svg)](iris-logo-title.svg) + +Code for generating the logos is at: +[SciTools/marketing/iris/logo/generate_logo.py](https://github.com/SciTools/marketing/blob/master/iris/logo/generate_logo.py) + +See the docstring of the `generate_logo()` function for more information. + +## Why a scripted logo? + +SVG logos are ideal for source-controlled projects: + +* Low file size, with infinitely scaling quality +* Universally recognised vector format, editable by many software packages +* XML-style content = human-readable diff when changes are made + +But Iris' logo is difficult to reproduce/edit using an SVG editor alone: + +* Includes correctly projected, low resolution coastlines +* Needs precise alignment of the 'visual centre' of the iris with the centres + of the Earth and the image + +An SVG image is simply XML format, so can be easily assembled automatically +with a script, which can also be engineered to address the above problems. 
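To make the "scripted SVG" idea concrete, here is a minimal standalone sketch; the shapes, colours and filename are invented for illustration, and the real logic lives in `generate_logo.py`, linked above:

```python
# Minimal sketch: an SVG is just XML, so the standard library can build one.
import xml.etree.ElementTree as ET

svg = ET.Element(
    "svg", xmlns="http://www.w3.org/2000/svg", width="120", height="120"
)
# A centred disc, standing in for the precisely aligned Earth/iris artwork.
ET.SubElement(svg, "circle", cx="60", cy="60", r="50", fill="#155f8e")
# Parameterised text, e.g. for per-package logo variants.
text = ET.SubElement(svg, "text", x="60", y="66", fill="#ffffff")
text.set("text-anchor", "middle")
text.text = "Iris"

with open("logo_sketch.svg", "w") as outfile:
    outfile.write(ET.tostring(svg, encoding="unicode"))
```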
+ +Further advantages of using a script: + +* Parameterised text, making it easy to standardise the logo across all Iris + packages +* Can generate an animated GIF/SVG of a rotating Earth diff --git a/docs/src/_static/favicon.ico b/docs/src/_static/favicon.ico deleted file mode 100644 index 0e5f0492b4..0000000000 Binary files a/docs/src/_static/favicon.ico and /dev/null differ diff --git a/docs/src/_static/icon_api.svg b/docs/src/_static/icon_api.svg new file mode 100644 index 0000000000..841b105973 --- /dev/null +++ b/docs/src/_static/icon_api.svg @@ -0,0 +1,144 @@ + + + +image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/src/_static/icon_development.svg b/docs/src/_static/icon_development.svg new file mode 100644 index 0000000000..dbc342688c --- /dev/null +++ b/docs/src/_static/icon_development.svg @@ -0,0 +1,63 @@ + + + + + + image/svg+xml + + + + + + + + + + diff --git a/docs/src/_static/icon_instructions.svg b/docs/src/_static/icon_instructions.svg new file mode 100644 index 0000000000..62b3fc3620 --- /dev/null +++ b/docs/src/_static/icon_instructions.svg @@ -0,0 +1,162 @@ + + + +image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/src/_static/icon_new_product.svg b/docs/src/_static/icon_new_product.svg new file mode 100644 index 0000000000..f222e1e066 --- /dev/null +++ b/docs/src/_static/icon_new_product.svg @@ -0,0 +1,182 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/src/_static/icon_shuttle.svg b/docs/src/_static/icon_shuttle.svg new file mode 100644 index 0000000000..46ba64d2e0 --- /dev/null +++ b/docs/src/_static/icon_shuttle.svg @@ -0,0 +1,71 @@ + + + + + + image/svg+xml + + + + + + + + + + + + diff --git a/docs/src/_static/icon_support.png b/docs/src/_static/icon_support.png new file mode 100644 index 0000000000..567cdb1b2f Binary files /dev/null and b/docs/src/_static/icon_support.png differ diff --git a/docs/src/_static/icon_thumb.png b/docs/src/_static/icon_thumb.png new file mode 100644 index 0000000000..6a14875e22 Binary files /dev/null and b/docs/src/_static/icon_thumb.png differ diff --git a/docs/src/_static/iris-logo-title.png b/docs/src/_static/iris-logo-title.png deleted file mode 100644 index e517aa7784..0000000000 Binary files a/docs/src/_static/iris-logo-title.png and /dev/null differ diff --git a/docs/src/_static/iris-logo-title.svg b/docs/src/_static/iris-logo-title.svg index 60ba0a1118..5bc38bfbda 100644 --- a/docs/src/_static/iris-logo-title.svg +++ b/docs/src/_static/iris-logo-title.svg @@ -1,89 +1,107 @@ - - - - - - - - - - image/svg+xml - - - - - - - - Iris - - + + \ No newline at end of file diff --git a/docs/src/_static/iris-logo.svg b/docs/src/_static/iris-logo.svg new file mode 100644 index 0000000000..6c4bdb0e5a --- /dev/null +++ b/docs/src/_static/iris-logo.svg @@ -0,0 +1,104 @@ + + \ No newline at end of file diff --git a/docs/src/_static/theme_override.css b/docs/src/_static/theme_override.css index c56b720f69..326c1d4d4a 100644 --- a/docs/src/_static/theme_override.css +++ b/docs/src/_static/theme_override.css @@ -1,33 +1,10 @@ /* import the standard theme css */ @import url("css/theme.css"); -/* now we can add custom any css */ - -/* set the width of the logo */ -.wy-side-nav-search>a img.logo, -.wy-side-nav-search .wy-dropdown>a img.logo { - width: 12rem -} - -/* color of the 
logo background in the top left corner */ -.wy-side-nav-search { - background-color: lightgray; -} - -/* color of the font for the version in the top left corner */ -.wy-side-nav-search>div.version { - color: black; - font-weight: bold; -} - -/* Ensures tables do now have width scroll bars */ -table.docutils td { - white-space: unset; - word-wrap: break-word; -} +/* now we can add custom CSS */ /* Used for very strong warning */ -#slim-red-box-message { +#slim-red-box-banner { background: #ff0000; box-sizing: border-box; color: #ffffff; @@ -35,8 +12,17 @@ table.docutils td { padding: 0.5em; } -#slim-red-box-message a { +#slim-red-box-banner a { color: #ffffff; - font-weight: normal; - text-decoration:underline; + font-weight: normal; + text-decoration: underline; +} + +/* bullet point list with green ticks */ +ul.squarelist { + /* https://developer.mozilla.org/en-US/docs/Web/CSS/list-style-type */ + list-style-type: "\2705"; + margin-left: 0; + text-indent: 1em; + padding-left: 5em; } diff --git a/docs/src/_templates/custom_footer.html b/docs/src/_templates/custom_footer.html new file mode 100644 index 0000000000..f81fcc583e --- /dev/null +++ b/docs/src/_templates/custom_footer.html @@ -0,0 +1 @@ +

Built using Python {{ python_version }}.

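For context, the ``python_version`` value used in the footer above is not a Sphinx built-in; it is supplied to the Jinja template through ``html_context``, as the ``conf.py`` changes later in this diff show. A minimal sketch of that wiring (the ``platform``-based derivation here is an assumption for illustration)::

    # conf.py (sketch): anything placed in html_context becomes available
    # to Jinja templates such as custom_footer.html.
    import platform

    build_python_version = platform.python_version()  # e.g. "3.8.10"

    html_context = {
        "python_version": build_python_version,
    }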
diff --git a/docs/src/_templates/custom_sidebar_logo_version.html b/docs/src/_templates/custom_sidebar_logo_version.html new file mode 100644 index 0000000000..c9d9ac6e2e --- /dev/null +++ b/docs/src/_templates/custom_sidebar_logo_version.html @@ -0,0 +1,26 @@ +{% if on_rtd %} + {% if rtd_version == 'latest' %} + + + + {% elif rtd_version == 'stable' %} + + + + {% elif rtd_version_type == 'tag' %} + {# Covers builds for specific tags, including RC's. #} + + + + {% else %} + {# Anything else build by RTD will be the HEAD of an activated branch #} + + + + {% endif %} +{%- else %} + {# not on rtd #} + + + +{%- endif %} diff --git a/docs/src/_templates/footer.html b/docs/src/_templates/footer.html deleted file mode 100644 index 1d5fb08b78..0000000000 --- a/docs/src/_templates/footer.html +++ /dev/null @@ -1,5 +0,0 @@ -{% extends "!footer.html" %} -{% block extrafooter %} - Built using Python {{ python_version }}. - {{ super() }} -{% endblock %} diff --git a/docs/src/_templates/layout.html b/docs/src/_templates/layout.html index 96a2e0913e..7377e866b7 100644 --- a/docs/src/_templates/layout.html +++ b/docs/src/_templates/layout.html @@ -1,16 +1,16 @@ -{% extends "!layout.html" %} +{% extends "pydata_sphinx_theme/layout.html" %} -{# This uses blocks. See: +{# This uses blocks. See: https://www.sphinx-doc.org/en/master/templating.html #} -/*---------------------------------------------------------------------------*/ -{%- block document %} - {% if READTHEDOCS and rtd_version == 'latest' %} -
+ {%- block docs_body %} + + {% if on_rtd and rtd_version == 'latest' %} +
You are viewing the latest unreleased documentation - v{{ version }}. You may prefer a + v{{ version }}. You may prefer a stable version.
@@ -19,29 +19,3 @@ {{ super() }} {%- endblock %} - -/*-----------------------------------------------------z----------------------*/ - -{% block menu %} - {{ super() }} - - {# menu_links and menu_links_name are set in conf.py (html_context) #} - - {% if menu_links %} -

- - {% if menu_links_name %} - {{ menu_links_name }} - {% else %} - External links - {% endif %} - -

-
    - {% for text, link in menu_links %} -
  • {{ text }}
  • - {% endfor %} -
- {% endif %} -{% endblock %} - diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index 67fc493e3e..ec7e1efd6d 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -3,19 +3,19 @@ .. _black: https://black.readthedocs.io/en/stable/ .. _cartopy: https://github.com/SciTools/cartopy -.. _.cirrus.yml: https://github.com/SciTools/iris/blob/main/.cirrus.yml .. _flake8: https://flake8.pycqa.org/en/stable/ .. _.flake8.yml: https://github.com/SciTools/iris/blob/main/.flake8 .. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris .. _conda: https://docs.conda.io/en/latest/ .. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json .. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json -.. _discussions: https://github.com/SciTools/iris/discussions .. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account +.. _GitHub Actions: https://docs.github.com/en/actions .. _GitHub Help Documentation: https://docs.github.com/en/github -.. _Iris GitHub Discussions: https://github.com/SciTools/iris/discussions +.. _GitHub Discussions: https://github.com/SciTools/iris/discussions .. _Iris: https://github.com/SciTools/iris .. _Iris GitHub: https://github.com/SciTools/iris +.. _Iris GitHub Actions: https://github.com/SciTools/iris/actions .. _iris-sample-data: https://github.com/SciTools/iris-sample-data .. _iris-test-data: https://github.com/SciTools/iris-test-data .. _isort: https://pycqa.github.io/isort/ @@ -38,6 +38,7 @@ .. _using git: https://docs.github.com/en/github/using-git .. _requirements/ci/: https://github.com/SciTools/iris/tree/main/requirements/ci .. _CF-UGRID: https://ugrid-conventions.github.io/ugrid-conventions/ +.. _issues on GitHub: https://github.com/SciTools/iris/issues?q=is%3Aopen+is%3Aissue+sort%3Areactions-%2B1-desc .. comment @@ -52,6 +53,7 @@ .. _@cpelley: https://github.com/cpelley .. _@djkirkham: https://github.com/djkirkham .. _@DPeterK: https://github.com/DPeterK +.. _@ESadek-MO: https://github.com/ESadek-MO .. _@esc24: https://github.com/esc24 .. _@jamesp: https://github.com/jamesp .. _@jonseddon: https://github.com/jonseddon @@ -63,6 +65,7 @@ .. _@QuLogic: https://github.com/QuLogic .. _@rcomer: https://github.com/rcomer .. _@rhattersley: https://github.com/rhattersley +.. _@schlunma: https://github.com/schlunma .. _@stephenworsley: https://github.com/stephenworsley .. _@tkknight: https://github.com/tkknight .. _@trexfeathers: https://github.com/trexfeathers diff --git a/docs/src/conf.py b/docs/src/conf.py index 19f22e808f..33864c4658 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -20,15 +20,16 @@ # ---------------------------------------------------------------------------- import datetime +from importlib.metadata import version as get_version import ntpath import os from pathlib import Path import re +from subprocess import run import sys +from urllib.parse import quote import warnings -import iris - # function to write useful output to stdout, prefixing the source. def autolog(message): @@ -41,20 +42,33 @@ def autolog(message): # -- Are we running on the readthedocs server, if so do some setup ----------- on_rtd = os.environ.get("READTHEDOCS") == "True" +# This is the rtd reference to the version, such as: latest, stable, v3.0.1 etc +rtd_version = os.environ.get("READTHEDOCS_VERSION") +if rtd_version is not None: + # Make rtd_version safe for use in shields.io badges. 
+ rtd_version = rtd_version.replace("_", "__") + rtd_version = rtd_version.replace("-", "--") + rtd_version = quote(rtd_version) + +# branch, tag, external (for pull request builds), or unknown. +rtd_version_type = os.environ.get("READTHEDOCS_VERSION_TYPE") + +# For local testing purposes we can force being on RTD and the version +# on_rtd = True # useful for testing +# rtd_version = "latest" # useful for testing +# rtd_version = "stable" # useful for testing +# rtd_version_type = "tag" # useful for testing +# rtd_version = "my_branch" # useful for testing + if on_rtd: autolog("Build running on READTHEDOCS server") # list all the READTHEDOCS environment variables that may be of use - # at some point autolog("Listing all environment variables on the READTHEDOCS server...") for item, value in os.environ.items(): autolog("[READTHEDOCS] {} = {}".format(item, value)) -# This is the rtd reference to the version, such as: latest, stable, v3.0.1 etc -# For local testing purposes this could be explicitly set latest or stable. -rtd_version = os.environ.get("READTHEDOCS_VERSION") - # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, @@ -82,20 +96,11 @@ def autolog(message): author = "Iris Developers" # The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. - -# The short X.Y version. -if iris.__version__ == "dev": - version = "dev" -else: - # major.minor.patch-dev -> major.minor.patch - version = ".".join(iris.__version__.split("-")[0].split(".")[:3]) -# The full version, including alpha/beta/rc tags. -release = iris.__version__ - -autolog("Iris Version = {}".format(version)) -autolog("Iris Release = {}".format(release)) +# |version|, also used in various other places throughout the built documents. +version = get_version("scitools-iris") +release = version +autolog(f"Iris Version = {version}") +autolog(f"Iris Release = {release}") # -- General configuration --------------------------------------------------- @@ -158,7 +163,6 @@ def _dotv(version): "sphinx_gallery.gen_gallery", "matplotlib.sphinxext.mathmpl", "matplotlib.sphinxext.plot_directive", - "image_test_output", ] if skip_api == "1": @@ -171,6 +175,7 @@ def _dotv(version): # -- panels extension --------------------------------------------------------- # See https://sphinx-panels.readthedocs.io/en/latest/ +panels_add_bootstrap_css = False # -- Napoleon extension ------------------------------------------------------- # See https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html @@ -229,6 +234,7 @@ def _dotv(version): "numpy": ("https://numpy.org/doc/stable/", None), "python": ("https://docs.python.org/3/", None), "scipy": ("https://docs.scipy.org/doc/scipy/", None), + "pandas": ("https://pandas.pydata.org/docs/", None), } # The name of the Pygments (syntax highlighting) style to use. @@ -246,6 +252,10 @@ def _dotv(version): extlinks = { "issue": ("https://github.com/SciTools/iris/issues/%s", "Issue #"), "pull": ("https://github.com/SciTools/iris/pull/%s", "PR #"), + "discussion": ( + "https://github.com/SciTools/iris/discussions/%s", + "Discussion #", + ), } # -- Doctest ("make doctest")-------------------------------------------------- @@ -257,43 +267,68 @@ def _dotv(version): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
# -html_logo = "_static/iris-logo-title.png" -html_favicon = "_static/favicon.ico" -html_theme = "sphinx_rtd_theme" +html_logo = "_static/iris-logo-title.svg" +html_favicon = "_static/iris-logo.svg" +html_theme = "pydata_sphinx_theme" + +# See https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/configuring.html#configure-the-search-bar-position +html_sidebars = { + "**": [ + "custom_sidebar_logo_version", + "search-field", + "sidebar-nav-bs", + "sidebar-ethical-ads", + ] +} +# See https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/configuring.html html_theme_options = { - "display_version": True, - "style_external_links": True, - "logo_only": "True", + "footer_items": ["copyright", "sphinx-version", "custom_footer"], + "collapse_navigation": True, + "navigation_depth": 3, + "show_prev_next": True, + "navbar_align": "content", + "github_url": "https://github.com/SciTools/iris", + "twitter_url": "https://twitter.com/scitools_iris", + # icons available: https://fontawesome.com/v5.15/icons?d=gallery&m=free + "icon_links": [ + { + "name": "GitHub Discussions", + "url": "https://github.com/SciTools/iris/discussions", + "icon": "far fa-comments", + }, + { + "name": "PyPI", + "url": "https://pypi.org/project/scitools-iris/", + "icon": "fas fa-box", + }, + { + "name": "Conda", + "url": "https://anaconda.org/conda-forge/iris", + "icon": "fas fa-boxes", + }, + ], + "use_edit_page_button": True, + "show_toc_level": 1, } +rev_parse = run(["git", "rev-parse", "--short", "HEAD"], capture_output=True) +commit_sha = rev_parse.stdout.decode().strip() + html_context = { + # pydata_theme + "github_repo": "iris", + "github_user": "scitools", + "github_version": "main", + "doc_path": "docs/src", + # custom + "on_rtd": on_rtd, "rtd_version": rtd_version, + "rtd_version_type": rtd_version_type, "version": version, "copyright_years": copyright_years, "python_version": build_python_version, - # menu_links and menu_links_name are used in _templates/layout.html - # to include some nice icons. See http://fontawesome.io for a list of - # icons (used in the sphinx_rtd_theme) - "menu_links_name": "Support", - "menu_links": [ - ( - ' Source Code', - "https://github.com/SciTools/iris", - ), - ( - ' GitHub Discussions', - "https://github.com/SciTools/iris/discussions", - ), - ( - ' StackOverflow for "How Do I?"', - "https://stackoverflow.com/questions/tagged/python-iris", - ), - ( - ' Legacy Documentation', - "https://scitools.org.uk/iris/docs/v2.4.0/index.html", - ), - ], + "commit_sha": commit_sha, } # Add any paths that contain custom static files (such as style sheets) here, @@ -302,12 +337,24 @@ def _dotv(version): html_static_path = ["_static"] html_style = "theme_override.css" +# this allows for using datatables: https://datatables.net/ +html_css_files = [ + "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", +] + +html_js_files = [ + "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", +] + # url link checker. Some links work but report as broken, lets ignore them. 
# See https://www.sphinx-doc.org/en/1.2/config.html#options-for-the-linkcheck-builder linkcheck_ignore = [ + "http://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499", "http://cfconventions.org", "http://code.google.com/p/msysgit/downloads/list", "http://effbot.org", + "https://help.github.com", + "https://docs.github.com", "https://github.com", "http://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", "http://schacon.github.com/git", @@ -316,6 +363,7 @@ def _dotv(version): "https://software.ac.uk/how-cite-software", "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", "http://www.nationalarchives.gov.uk/doc/open-government-licence", + "https://www.metoffice.gov.uk/", ] # list of sources to exclude from the build. @@ -335,6 +383,11 @@ def _dotv(version): "ignore_pattern": r"__init__\.py", # force gallery building, unless overridden (see src/Makefile) "plot_gallery": "'True'", + # force re-registering of nc-time-axis with matplotlib for each example, + # required for sphinx-gallery>=0.11.0 + "reset_modules": ( + lambda gallery_conf, fname: sys.modules.pop("nc_time_axis", None), + ), } # ----------------------------------------------------------------------------- diff --git a/docs/src/developers_guide/assets/developer-settings-github-apps.png b/docs/src/developers_guide/assets/developer-settings-github-apps.png new file mode 100644 index 0000000000..a63994d087 Binary files /dev/null and b/docs/src/developers_guide/assets/developer-settings-github-apps.png differ diff --git a/docs/src/developers_guide/assets/download-pem.png b/docs/src/developers_guide/assets/download-pem.png new file mode 100644 index 0000000000..cbceb1304d Binary files /dev/null and b/docs/src/developers_guide/assets/download-pem.png differ diff --git a/docs/src/developers_guide/assets/generate-key.png b/docs/src/developers_guide/assets/generate-key.png new file mode 100644 index 0000000000..ac894dc71b Binary files /dev/null and b/docs/src/developers_guide/assets/generate-key.png differ diff --git a/docs/src/developers_guide/assets/gha-token-example.png b/docs/src/developers_guide/assets/gha-token-example.png new file mode 100644 index 0000000000..cba1cf6935 Binary files /dev/null and b/docs/src/developers_guide/assets/gha-token-example.png differ diff --git a/docs/src/developers_guide/assets/install-app.png b/docs/src/developers_guide/assets/install-app.png new file mode 100644 index 0000000000..31259de588 Binary files /dev/null and b/docs/src/developers_guide/assets/install-app.png differ diff --git a/docs/src/developers_guide/assets/install-iris-actions.png b/docs/src/developers_guide/assets/install-iris-actions.png new file mode 100644 index 0000000000..db16dee55b Binary files /dev/null and b/docs/src/developers_guide/assets/install-iris-actions.png differ diff --git a/docs/src/developers_guide/assets/installed-app.png b/docs/src/developers_guide/assets/installed-app.png new file mode 100644 index 0000000000..ab87032393 Binary files /dev/null and b/docs/src/developers_guide/assets/installed-app.png differ diff --git a/docs/src/developers_guide/assets/iris-actions-secret.png b/docs/src/developers_guide/assets/iris-actions-secret.png new file mode 100644 index 0000000000..f32456d0f2 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-actions-secret.png differ diff --git a/docs/src/developers_guide/assets/iris-github-apps.png b/docs/src/developers_guide/assets/iris-github-apps.png new file mode 100644 index 0000000000..50753532b7 Binary files /dev/null 
and b/docs/src/developers_guide/assets/iris-github-apps.png differ diff --git a/docs/src/developers_guide/assets/iris-secrets-created.png b/docs/src/developers_guide/assets/iris-secrets-created.png new file mode 100644 index 0000000000..19b0ba11dc Binary files /dev/null and b/docs/src/developers_guide/assets/iris-secrets-created.png differ diff --git a/docs/src/developers_guide/assets/iris-security-actions.png b/docs/src/developers_guide/assets/iris-security-actions.png new file mode 100644 index 0000000000..7cbe3a7dc2 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-security-actions.png differ diff --git a/docs/src/developers_guide/assets/iris-settings.png b/docs/src/developers_guide/assets/iris-settings.png new file mode 100644 index 0000000000..70714235c2 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-settings.png differ diff --git a/docs/src/developers_guide/assets/org-perms-members.png b/docs/src/developers_guide/assets/org-perms-members.png new file mode 100644 index 0000000000..99fd8985e2 Binary files /dev/null and b/docs/src/developers_guide/assets/org-perms-members.png differ diff --git a/docs/src/developers_guide/assets/repo-perms-contents.png b/docs/src/developers_guide/assets/repo-perms-contents.png new file mode 100644 index 0000000000..4c325c334d Binary files /dev/null and b/docs/src/developers_guide/assets/repo-perms-contents.png differ diff --git a/docs/src/developers_guide/assets/repo-perms-pull-requests.png b/docs/src/developers_guide/assets/repo-perms-pull-requests.png new file mode 100644 index 0000000000..812f5ef951 Binary files /dev/null and b/docs/src/developers_guide/assets/repo-perms-pull-requests.png differ diff --git a/docs/src/developers_guide/assets/scitools-settings.png b/docs/src/developers_guide/assets/scitools-settings.png new file mode 100644 index 0000000000..8d7e728ab5 Binary files /dev/null and b/docs/src/developers_guide/assets/scitools-settings.png differ diff --git a/docs/src/developers_guide/assets/user-perms.png b/docs/src/developers_guide/assets/user-perms.png new file mode 100644 index 0000000000..607c7dcdb6 Binary files /dev/null and b/docs/src/developers_guide/assets/user-perms.png differ diff --git a/docs/src/developers_guide/assets/webhook-active.png b/docs/src/developers_guide/assets/webhook-active.png new file mode 100644 index 0000000000..538362f335 Binary files /dev/null and b/docs/src/developers_guide/assets/webhook-active.png differ diff --git a/docs/src/developers_guide/asv_example_images/commits.png b/docs/src/developers_guide/asv_example_images/commits.png new file mode 100644 index 0000000000..4e0d695322 Binary files /dev/null and b/docs/src/developers_guide/asv_example_images/commits.png differ diff --git a/docs/src/developers_guide/asv_example_images/comparison.png b/docs/src/developers_guide/asv_example_images/comparison.png new file mode 100644 index 0000000000..e146d30696 Binary files /dev/null and b/docs/src/developers_guide/asv_example_images/comparison.png differ diff --git a/docs/src/developers_guide/asv_example_images/scalability.png b/docs/src/developers_guide/asv_example_images/scalability.png new file mode 100644 index 0000000000..260c3ef536 Binary files /dev/null and b/docs/src/developers_guide/asv_example_images/scalability.png differ diff --git a/docs/src/developers_guide/ci_checks.png b/docs/src/developers_guide/ci_checks.png old mode 100755 new mode 100644 index e088e03a66..54ab672b3c Binary files a/docs/src/developers_guide/ci_checks.png and 
b/docs/src/developers_guide/ci_checks.png differ diff --git a/docs/src/developers_guide/contributing_benchmarks.rst b/docs/src/developers_guide/contributing_benchmarks.rst new file mode 100644 index 0000000000..65bc9635b6 --- /dev/null +++ b/docs/src/developers_guide/contributing_benchmarks.rst @@ -0,0 +1,62 @@ +.. include:: ../common_links.inc + +.. _contributing.benchmarks: + +Benchmarking +============ +Iris includes architecture for benchmarking performance and other metrics of +interest. This is done using the `Airspeed Velocity`_ (ASV) package. + +Full detail on the setup and how to run or write benchmarks is in +`benchmarks/README.md`_ in the Iris repository. + +Continuous Integration +---------------------- +The primary purpose of `Airspeed Velocity`_, and Iris' specific benchmarking +setup, is to monitor for performance changes using statistical comparison +between commits, and this forms part of Iris' continuous integration. + +Accurately assessing performance takes longer than functionality pass/fail +tests, so the benchmark suite is not automatically run against open pull +requests; instead it is **run overnight against each of the commits of the +previous day** to check if any commit has introduced performance shifts. +Detected shifts are reported in a new Iris GitHub issue. + +If a pull request author/reviewer suspects their changes may cause performance +shifts, a convenience is available (currently via Nox) to replicate the +overnight benchmark run, but comparing the current ``HEAD`` with a requested +branch (e.g. ``upstream/main``). Read more in `benchmarks/README.md`_. + +Other Uses +---------- +Even when not statistically comparing commits, ASV's accurate execution time +results - recorded using a sophisticated system of repeats - have other +applications. + +* Absolute numbers can be interpreted providing they are recorded on a + dedicated resource. +* Results for a series of commits can be visualised for an intuitive + understanding of when and why changes occurred. + + .. image:: asv_example_images/commits.png + :width: 300 + +* Parameterised benchmarks make it easy to visualise: + + * Comparisons + + .. image:: asv_example_images/comparison.png + :width: 300 + + * Scalability + + .. image:: asv_example_images/scalability.png + :width: 300 + +This isn't limited to execution times: ASV can also measure memory demand, +and even arbitrary numbers (e.g. file size, regridding accuracy), although +without the repetition logic that execution timing has. + + +.. _Airspeed Velocity: https://github.com/airspeed-velocity/asv +.. _benchmarks/README.md: https://github.com/SciTools/iris/blob/main/benchmarks/README.md diff --git a/docs/src/developers_guide/contributing_ci_tests.rst b/docs/src/developers_guide/contributing_ci_tests.rst index 0257ff7cff..1d06434843 100644 --- a/docs/src/developers_guide/contributing_ci_tests.rst +++ b/docs/src/developers_guide/contributing_ci_tests.rst @@ -13,51 +13,50 @@ The `Iris`_ GitHub repository is configured to run checks against all its branches automatically whenever a pull-request is created, updated or merged. The checks performed are: -* :ref:`testing_cirrus` +* :ref:`testing_gha` * :ref:`testing_cla` * :ref:`pre_commit_ci` -.. _testing_cirrus: +.. _testing_gha: -Cirrus-CI -********* +GitHub Actions +************** Iris unit and integration tests are an essential mechanism to ensure that the Iris code base is working as expected. :ref:`developer_running_tests` may be performed manually by a developer locally.
However Iris is configured to -use the `cirrus-ci`_ service for automated Continuous Integration (CI) testing. +use `GitHub Actions`_ (GHA) for automated Continuous Integration (CI) testing. -The `cirrus-ci`_ configuration file `.cirrus.yml`_ in the root of the Iris repository -defines the tasks to be performed by `cirrus-ci`_. For further details -refer to the `Cirrus-CI Documentation`_. The tasks performed during CI include: +The Iris GHA YAML configuration files in the ``.github/workflows`` directory +define the CI tasks to be performed. For further details +refer to the `GitHub Actions`_ documentation. The tasks performed during CI include: -* linting the code base and ensuring it adheres to the `black`_ format * running the system, integration and unit tests for Iris * ensuring the documentation gallery builds successfully * performing all doc-tests within the code base * checking all URL references within the code base and documentation are valid -The above `cirrus-ci`_ tasks are run automatically against all `Iris`_ branches +The above GHA tasks are run automatically against all `Iris`_ branches on GitHub whenever a pull-request is submitted, updated or merged. See the -`Cirrus-CI Dashboard`_ for details of recent past and active Iris jobs. +`Iris GitHub Actions`_ dashboard for details of recent past and active CI jobs. -.. _cirrus_test_env: +.. _gha_test_env: -Cirrus CI Test environment -------------------------- +GitHub Actions Test Environment +------------------------------- -The test environment on the Cirrus-CI service is determined from the requirement files -in ``requirements/ci/py**.yml``. These are conda environment files that list the entire -set of build, test and run requirements for Iris. +The CI test environments for our GHA are determined from the requirement files +in ``requirements/ci/pyXX.yml``. These are conda environment files that list the top-level +package dependencies for running and testing Iris. For reproducible test results, these environments are resolved for all their dependencies -and stored as lock files in ``requirements/ci/nox.lock``. The test environments will not -resolve the dependencies each time, instead they will use the lock file to reproduce the -same exact environment each time. +and stored as conda lock files in the ``requirements/ci/nox.lock`` directory. The test environments +will not resolve the dependencies each time; instead they will use the lock files to reproduce the +exact same environment each time. -**If you have updated the requirement yaml files with new dependencies, you will need to +**If you have updated the requirement YAML files with new dependencies, you will need to generate new lock files.** To do this, run the command:: python tools/update_lockfiles.py -o requirements/ci/nox.lock requirements/ci/py*.yml @@ -68,49 +67,22 @@ or simply:: and add the changed lockfiles to your pull request. +.. note:: + + If your installation of conda runs through Artifactory or another similar + proxy, then you will need to amend that lockfile to use URLs that GitHub + Actions can access. A utility to strip out Artifactory exists in the + ``ssstack`` tool. + New lockfiles are generated automatically each week to ensure that Iris continues to be tested against the latest available version of its dependencies. Each week the yaml files in ``requirements/ci`` are resolved by a GitHub Action. If the resolved environment has changed, a pull request is created with the new lock files.
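For a rough sense of what the lock-file generation amounts to (a sketch only; the real logic lives in ``tools/update_lockfiles.py`` and may differ, and the environment filename below is assumed), each environment YAML is resolved once and the pinned result stored::

    # Sketch: resolve one CI environment file into a pinned lock file,
    # using the third-party conda-lock tool.
    import subprocess

    subprocess.run(
        [
            "conda-lock",
            "--file", "requirements/ci/py38.yml",  # assumed filename
            "--platform", "linux-64",
        ],
        check=True,
    )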
-The CI test suite will run on this pull request and fixes for failed tests can be pushed to -the ``auto-update-lockfiles`` branch to be included in the PR. -Once a developer has pushed to this branch, the auto-update process will not run again until -the PR is merged, to prevent overwriting developer commits. -The auto-updater can still be invoked manually in this situation by going to the `GitHub Actions`_ -page for the workflow, and manually running using the "Run Workflow" button. -By default, this will also not override developer commits. To force an update, you must -confirm "yes" in the "Run Worflow" prompt. - - -.. _skipping Cirrus-CI tasks: - -Skipping Cirrus-CI Tasks ------------------------- - -As a developer you may wish to not run all the CI tasks when you are actively -developing e.g., you are writing documentation and there is no need for linting, -or long running compute intensive testing tasks to be executed. - -As a convenience, it is possible to easily skip one or more tasks by setting -the appropriate environment variable within the `.cirrus.yml`_ file to a -**non-empty** string: - -* ``SKIP_LINT_TASK`` to skip `flake8`_ linting and `black`_ formatting -* ``SKIP_TEST_MINIMAL_TASK`` to skip restricted unit and integration testing -* ``SKIP_TEST_FULL_TASK`` to skip full unit and integration testing -* ``SKIP_GALLERY_TASK`` to skip building the documentation gallery -* ``SKIP_DOCTEST_TASK`` to skip running the documentation doc-tests -* ``SKIP_LINKCHECK_TASK`` to skip checking for broken documentation URL references -* ``SKIP_ALL_TEST_TASKS`` which is equivalent to setting ``SKIP_TEST_MINIMAL_TASK`` and ``SKIP_TEST_FULL_TASK`` -* ``SKIP_ALL_DOC_TASKS`` which is equivalent to setting ``SKIP_GALLERY_TASK``, ``SKIP_DOCTEST_TASK``, and ``SKIP_LINKCHECK_TASK`` - -e.g., to skip the linting task, the following are all equivalent:: - - SKIP_LINT_TASK: "1" - SKIP_LINT_TASK: "true" - SKIP_LINT_TASK: "false" - SKIP_LINT_TASK: "skip" - SKIP_LINT_TASK: "unicorn" +The CI test suite will run on this pull request. If the tests fail, a developer +will need to create a new branch based off the ``auto-update-lockfiles`` branch +and add the required fixes to this new branch. If the fixes are made to the +``auto-update-lockfiles`` branch, these will be overwritten the next time the +GitHub Action is run. GitHub Checklist @@ -146,9 +118,5 @@ pull-requests given the `Iris`_ GitHub repository `.pre-commit-config.yaml`_. See the `pre-commit.ci dashboard`_ for details of recent past and active Iris jobs. - -.. _Cirrus-CI Dashboard: https://cirrus-ci.com/github/SciTools/iris -.. _Cirrus-CI Documentation: https://cirrus-ci.org/guide/writing-tasks/ .. _.pre-commit-config.yaml: https://github.com/SciTools/iris/blob/main/.pre-commit-config.yaml .. _pre-commit.ci dashboard: https://results.pre-commit.ci/repo/github/5312648 -.. _GitHub Actions: https://github.com/SciTools/iris/actions/workflows/refresh-lockfiles.yml diff --git a/docs/src/developers_guide/contributing_codebase_index.rst b/docs/src/developers_guide/contributing_codebase_index.rst index 88986c0c7a..b59a196ff0 100644 --- a/docs/src/developers_guide/contributing_codebase_index.rst +++ b/docs/src/developers_guide/contributing_codebase_index.rst @@ -1,7 +1,7 @@ .. _contributing.documentation.codebase: -Contributing to the Code Base -============================= +Working with the Code Base +========================== ..
toctree:: :maxdepth: 3 diff --git a/docs/src/developers_guide/contributing_deprecations.rst b/docs/src/developers_guide/contributing_deprecations.rst index 1ecafdca9f..0b22e2cbd2 100644 --- a/docs/src/developers_guide/contributing_deprecations.rst +++ b/docs/src/developers_guide/contributing_deprecations.rst @@ -25,29 +25,29 @@ deprecation is accompanied by the introduction of a new public API. Under these circumstances the following points apply: - - Using the deprecated API must result in a concise deprecation warning which - is an instance of :class:`iris.IrisDeprecation`. - It is easiest to call - :func:`iris._deprecation.warn_deprecated`, which is a - simple wrapper to :func:`warnings.warn` with the signature - `warn_deprecation(message, **kwargs)`. - - Where possible, your deprecation warning should include advice on - how to avoid using the deprecated API. For example, you might - reference a preferred API, or more detailed documentation elsewhere. - - You must update the docstring for the deprecated API to include a - Sphinx deprecation directive: - - :literal:`.. deprecated:: ` - - where you should replace `` with the major and minor version - of Iris in which this API is first deprecated. For example: `1.8`. - - As with the deprecation warning, you should include advice on how to - avoid using the deprecated API within the content of this directive. - Feel free to include more detail in the updated docstring than in the - deprecation warning. - - You should check the documentation for references to the deprecated - API and update them as appropriate. +- Using the deprecated API must result in a concise deprecation warning which + is an instance of :class:`iris.IrisDeprecation`. + It is easiest to call + :func:`iris._deprecation.warn_deprecated`, which is a + simple wrapper to :func:`warnings.warn` with the signature + `warn_deprecation(message, **kwargs)`. +- Where possible, your deprecation warning should include advice on + how to avoid using the deprecated API. For example, you might + reference a preferred API, or more detailed documentation elsewhere. +- You must update the docstring for the deprecated API to include a + Sphinx deprecation directive: + + :literal:`.. deprecated:: ` + + where you should replace `` with the major and minor version + of Iris in which this API is first deprecated. For example: `1.8`. + + As with the deprecation warning, you should include advice on how to + avoid using the deprecated API within the content of this directive. + Feel free to include more detail in the updated docstring than in the + deprecation warning. +- You should check the documentation for references to the deprecated + API and update them as appropriate. Changing a Default ------------------ @@ -64,14 +64,14 @@ it causes the corresponding public API to use its new default behaviour. The following points apply in addition to those for removing a public API: - - You should add a new boolean attribute to :data:`iris.FUTURE` (by - modifying :class:`iris.Future`) that controls the default behaviour - of the public API that needs updating. The initial state of the new - boolean attribute should be `False`. You should name the new boolean - attribute to indicate that setting it to `True` will select the new - default behaviour. - - You should include a reference to this :data:`iris.FUTURE` flag in your - deprecation warning and corresponding Sphinx deprecation directive. 
+- You should add a new boolean attribute to :data:`iris.FUTURE` (by + modifying :class:`iris.Future`) that controls the default behaviour + of the public API that needs updating. The initial state of the new + boolean attribute should be `False`. You should name the new boolean + attribute to indicate that setting it to `True` will select the new + default behaviour. +- You should include a reference to this :data:`iris.FUTURE` flag in your + deprecation warning and corresponding Sphinx deprecation directive. Removing a Deprecation @@ -94,11 +94,11 @@ and/or example code should be removed/updated as appropriate. Changing a Default ------------------ - - You should update the initial state of the relevant boolean attribute - of :data:`iris.FUTURE` to `True`. - - You should deprecate setting the relevant boolean attribute of - :class:`iris.Future` in the same way as described in - :ref:`removing-a-public-api`. +- You should update the initial state of the relevant boolean attribute + of :data:`iris.FUTURE` to `True`. +- You should deprecate setting the relevant boolean attribute of + :class:`iris.Future` in the same way as described in + :ref:`removing-a-public-api`. .. rubric:: Footnotes diff --git a/docs/src/developers_guide/contributing_documentation_full.rst b/docs/src/developers_guide/contributing_documentation_full.rst index 77b898c0f3..ac62a67373 100755 --- a/docs/src/developers_guide/contributing_documentation_full.rst +++ b/docs/src/developers_guide/contributing_documentation_full.rst @@ -1,3 +1,4 @@ +.. include:: ../common_links.inc .. _contributing.documentation_full: @@ -31,7 +32,7 @@ The build can be run from the documentation directory ``docs/src``. The build output for the html is found in the ``_build/html`` sub directory. When updating the documentation ensure the html build has *no errors* or -*warnings* otherwise it may fail the automated `cirrus-ci`_ build. +*warnings* otherwise it may fail the automated `Iris GitHub Actions`_ build. Once the build is complete, if it is rerun it will only rebuild the impacted build artefacts so should take less time. @@ -66,21 +67,25 @@ This is useful for a final test before committing your changes. have been promoted to be **errors** to ensure they are addressed. This **only** applies when ``make html`` is run. -.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris - .. _contributing.documentation.testing: Testing ~~~~~~~ -There are a ways to test various aspects of the documentation. The -``make`` commands shown below can be run in the ``docs`` or -``docs/src`` directory. +There are various ways to test aspects of the documentation. Each :ref:`contributing.documentation.gallery` entry has a corresponding test. -To run the tests:: +To run all the gallery tests:: + + pytest -v docs/gallery_tests/test_gallery_examples.py + +To run a test for a single gallery example, use the ``pytest -k`` option for +pattern matching, e.g.:: + + pytest -v -k plot_coriolis docs/gallery_tests/test_gallery_examples.py - make gallerytest +The ``make`` commands shown below can be run in the ``docs`` or ``docs/src`` +directory. Many documentation pages includes python code itself that can be run to ensure it is still valid or to demonstrate examples. To ensure these tests pass @@ -115,7 +120,7 @@ or ignore the url. ``spelling_word_list_filename``. -.. note:: In addition to the automated `cirrus-ci`_ build of all the +.. 
note:: In addition to the automated `Iris GitHub Actions`_ build of all the documentation build options above, the https://readthedocs.org/ service is also used. The configuration of this held in a file in the root of the @@ -148,7 +153,7 @@ can exclude the module from the API documentation. Add the entry to the Gallery ~~~~~~~ -The Iris :ref:`sphx_glr_generated_gallery` uses a sphinx extension named +The Iris :ref:`gallery_index` uses a sphinx extension named `sphinx-gallery `_ that auto generates reStructuredText (rst) files based upon a gallery source directory that abides directory and filename convention. diff --git a/docs/src/developers_guide/contributing_getting_involved.rst b/docs/src/developers_guide/contributing_getting_involved.rst index f7bd4733a3..9ec6559114 100644 --- a/docs/src/developers_guide/contributing_getting_involved.rst +++ b/docs/src/developers_guide/contributing_getting_involved.rst @@ -1,8 +1,9 @@ .. include:: ../common_links.inc .. _development_where_to_start: +.. _developers_guide: -Getting Involved +Developers Guide ---------------- Iris_ is an Open Source project hosted on Github and as such anyone with a @@ -17,7 +18,7 @@ The `Iris GitHub`_ project has been configured to use templates for each of the above issue types when creating a `new issue`_ to ensure the appropriate information is provided. -Alternatively, **join the conversation** in `Iris GitHub Discussions`_, when +Alternatively, **join the conversation** in Iris `GitHub Discussions`_, when you would like the opinions of the Iris community. A `pull request`_ may also be created by anyone who has become a @@ -25,7 +26,7 @@ A `pull request`_ may also be created by anyone who has become a ``main`` branch are only given to **core developers** of Iris_, this is to ensure a measure of control. -To get started we suggest reading recent `issues`_, `discussions`_ and +To get started we suggest reading recent `issues`_, `GitHub Discussions`_ and `pull requests`_ for Iris. If you are new to using GitHub we recommend reading the @@ -36,5 +37,30 @@ If you are new to using GitHub we recommend reading the `Governance `_ section of the `SciTools`_ ogranization web site. - .. _GitHub getting started: https://docs.github.com/en/github/getting-started-with-github + + +.. toctree:: + :maxdepth: 1 + :caption: Developers Guide + :name: development_index + :hidden: + + gitwash/index + contributing_documentation + contributing_codebase_index + contributing_changes + github_app + release + + +.. toctree:: + :maxdepth: 1 + :caption: Reference + :hidden: + + ../generated/api/iris + ../whatsnew/index + ../techpapers/index + ../copyright + ../voted_issues diff --git a/docs/src/developers_guide/contributing_graphics_tests.rst b/docs/src/developers_guide/contributing_graphics_tests.rst index 1268aa2686..7964c008c5 100644 --- a/docs/src/developers_guide/contributing_graphics_tests.rst +++ b/docs/src/developers_guide/contributing_graphics_tests.rst @@ -2,72 +2,17 @@ .. _testing.graphics: -Graphics Tests -************** +Adding or Updating Graphics Tests +================================= -Iris may be used to create various forms of graphical output; to ensure -the output is consistent, there are automated tests to check against -known acceptable graphical output. See :ref:`developer_running_tests` for -more information. 
-
-At present graphical tests are used in the following areas of Iris:
-
-* Module ``iris.tests.test_plot``
-* Module ``iris.tests.test_quickplot``
-* :ref:`sphx_glr_generated_gallery` plots contained in
-  ``docs/gallery_tests``.
-
-
-Challenges
-==========
-
-Iris uses many dependencies that provide functionality, an example that
-applies here is matplotlib_. For more information on the dependences, see
-:ref:`installing_iris`. When there are updates to the matplotlib_ or a
-dependency of matplotlib, this may result in a change in the rendered graphical
-output. This means that there may be no changes to Iris_, but due to an
-updated dependency any automated tests that compare a graphical output to a
-known acceptable output may fail. The failure may also not be visually
-perceived as it may be a simple pixel shift.
-
-
-Testing Strategy
-================
-
-The `Iris Cirrus-CI matrix`_ defines multiple test runs that use
-different versions of Python to ensure Iris is working as expected.
-
-To make this manageable, the ``iris.tests.IrisTest_nometa.check_graphic`` test
-routine tests against multiple alternative **acceptable** results. It does
-this using an image **hash** comparison technique which avoids storing
-reference images in the Iris repository itself.
-
-This consists of:
-
- * The ``iris.tests.IrisTest_nometa.check_graphic`` function uses a perceptual
-   **image hash** of the outputs (see https://github.com/JohannesBuchner/imagehash)
-   as the basis for checking test results.
-
- * The hashes of known **acceptable** results for each test are stored in a
-   lookup dictionary, saved to the repo file
-   ``lib/iris/tests/results/imagerepo.json``
-   (`link <https://github.com/SciTools/iris/blob/main/lib/iris/tests/results/imagerepo.json>`_).
-
- * An actual reference image for each hash value is stored in a *separate*
-   public repository https://github.com/SciTools/test-iris-imagehash.
-
- * The reference images allow human-eye assessment of whether a new output is
-   judged to be close enough to the older ones, or not.
-
- * The utility script ``iris/tests/idiff.py`` automates checking, enabling the
-   developer to easily compare proposed new **acceptable** result images
-   against the existing accepted reference images, for each failing test.
+.. note::
 
-The acceptable images for each test can be viewed online. The :ref:`testing.imagehash_index` lists all the graphical tests in the test suite and
-shows the known acceptable result images for comparison.
+   If a large number of image tests are failing due to an update to the
+   libraries used for image hashing, follow the instructions on
+   :ref:`refresh-imagerepo`.
 
-Reviewing Failing Tests
-=======================
+Generating New Results
+----------------------
 
 When you find that a graphics test in the Iris testing suite has failed,
 following changes in Iris or the run dependencies, this is the process
@@ -76,14 +21,24 @@ you should follow:
 
 #. Create a new, empty directory to store temporary image results, at the
    path ``lib/iris/tests/result_image_comparison`` in your Iris repository
    checkout.
 
-#. **In your Iris repo root directory**, run the relevant (failing) tests
-   directly as python scripts, or by using a command such as::
+#. Run the relevant (failing) tests directly as python scripts, or using
+   ``pytest``.
+
+The results of the failing image tests will now be available in
+``lib/iris/tests/result_image_comparison``.
+
+.. note::
+
+   The ``result_image_comparison`` folder is covered by a project
+   ``.gitignore`` setting, so those files *will not show up* in a
+   ``git status`` check.
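+
+For instance, a failing graphics module can be targeted directly with
+``pytest`` (the module shown here is purely illustrative; substitute whichever
+test is failing for you)::
+
+    pytest lib/iris/tests/test_plot.py
+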
- python -m unittest discover paths/to/test/files +Reviewing Failing Tests +----------------------- -#. In the ``iris/lib/iris/tests`` folder, run the command:: +#. Run ``iris/lib/iris/tests/graphics/idiff.py`` with python, e.g.: - python idiff.py + python idiff.py This will open a window for you to visually inspect side-by-side **old**, **new** and **difference** images for each failed @@ -92,29 +47,28 @@ you should follow: If the change is **accepted**: - * the imagehash value of the new result image is added into the relevant - set of 'valid result hashes' in the image result database file, - ``tests/results/imagerepo.json`` + * the imagehash value of the new result image is added into the relevant + set of 'valid result hashes' in the image result database file, + ``tests/results/imagerepo.json`` - * the relevant output file in ``tests/result_image_comparison`` is - renamed according to the image hash value, as ``.png``. - A copy of this new PNG file must then be added into the reference image - repository at https://github.com/SciTools/test-iris-imagehash - (See below). + * the relevant output file in ``tests/result_image_comparison`` is renamed + according to the test name. A copy of this new PNG file must then be added + into the ``iris-test-data`` repository, at + https://github.com/SciTools/iris-test-data (See below). If a change is **skipped**: - * no further changes are made in the repo. + * no further changes are made in the repo. - * when you run ``iris/tests/idiff.py`` again, the skipped choice will be - presented again. + * when you run ``iris/tests/idiff.py`` again, the skipped choice will be + presented again. If a change is **rejected**: - * the output image is deleted from ``result_image_comparison``. + * the output image is deleted from ``result_image_comparison``. - * when you run ``iris/tests/idiff.py`` again, the skipped choice will not - appear, unless the relevant failing test is re-run. + * when you run ``iris/tests/idiff.py`` again, the skipped choice will not + appear, unless the relevant failing test is re-run. #. **Now re-run the tests**. The **new** result should now be recognised and the relevant test should pass. However, some tests can perform *multiple* @@ -123,46 +77,66 @@ you should follow: re-run may encounter further (new) graphical test failures. If that happens, simply repeat the check-and-accept process until all tests pass. +#. You're now ready to :ref:`add-graphics-test-changes` -Add Your Changes to Iris -======================== -To add your changes to Iris, you need to make two pull requests (PR). +Adding a New Image Test +----------------------- -#. The first PR is made in the ``test-iris-imagehash`` repository, at - https://github.com/SciTools/test-iris-imagehash. +If you attempt to run ``idiff.py`` when there are new graphical tests for which +no baseline yet exists, you will get a warning that ``idiff.py`` is ``Ignoring +unregistered test result...``. In this case, - * First, add all the newly-generated referenced PNG files into the - ``images/v4`` directory. In your Iris repo, these files are to be found - in the temporary results folder ``iris/tests/result_image_comparison``. +#. rename the relevant images from ``iris/tests/result_image_comparison`` by - * Then, to update the file which lists available images, - ``v4_files_listing.txt``, run from the project root directory:: + * removing the ``result-`` prefix - python recreate_v4_files_listing.py + * fully qualifying the test name if it isn't already (i.e. 
it should start
+     ``iris.tests...`` or ``gallery_tests...``)
 
-   * Create a PR proposing these changes, in the usual way.
+#. run the tests in the mode that lets them create missing data (see
+   :ref:`create-missing`). This will update ``imagerepo.json`` with the new
+   test name and image hash.
 
-#. The second PR is created in the Iris_ repository, and
-   should only include the change to the image results database,
-   ``tests/results/imagerepo.json``.
-   The description box of this pull request should contain a reference to
-   the matching one in ``test-iris-imagehash``.
+#. and then add them to the Iris test data as covered in
+   :ref:`add-graphics-test-changes`.
 
-.. note::
 
-    The ``result_image_comparison`` folder is covered by a project
-    ``.gitignore`` setting, so those files *will not show up* in a
-    ``git status`` check.
+.. _refresh-imagerepo:
 
-.. important::
+Refreshing the Stored Hashes
+----------------------------
 
-    The Iris pull-request will not test successfully in Cirrus-CI until the
-    ``test-iris-imagehash`` pull request has been merged. This is because there
-    is an Iris_ test which ensures the existence of the reference images (uris)
-    for all the targets in the image results database. It will also fail
-    if you forgot to run ``recreate_v4_files_listing.py`` to update the
-    image-listing file in ``test-iris-imagehash``.
+From time to time, a new version of the image hashing library will cause all
+image hashes to change. The image hashes stored in
+``tests/results/imagerepo.json`` can be refreshed using the baseline images
+stored in the ``iris-test-data`` repository (at
+https://github.com/SciTools/iris-test-data) using the script
+``tests/graphics/recreate_imagerepo.py``. Use the ``--help`` argument for
+details of the command line arguments.
 
-.. _Iris Cirrus-CI matrix: https://github.com/scitools/iris/blob/main/.cirrus.yml
+.. _add-graphics-test-changes:
+
+Add Your Changes to Iris
+------------------------
+
+To add your changes to Iris, you need to make two pull requests (PR).
+
+#. The first PR is made in the ``iris-test-data`` repository, at
+   https://github.com/SciTools/iris-test-data.
+
+   * Add all the newly-generated reference PNG files into the
+     ``test_data/images`` directory. In your Iris repo, these files are to be found
+     in the temporary results folder ``iris/tests/result_image_comparison``.
+
+   * Create a PR proposing these changes, in the usual way.
+
+#. The second PR is the one that makes the changes you intend to the Iris_ repository.
+   The description box of this pull request should contain a reference to
+   the matching one in ``iris-test-data``.
+
+   * This PR should include updating the version of the test data in
+     ``.github/workflows/ci-tests.yml`` and
+     ``.github/workflows/ci-docs-tests.yml`` to the new version created by the
+     merging of your ``iris-test-data`` PR.
diff --git a/docs/src/developers_guide/contributing_pull_request_checklist.rst b/docs/src/developers_guide/contributing_pull_request_checklist.rst
index 5afb461d68..57bc9fd728 100644
--- a/docs/src/developers_guide/contributing_pull_request_checklist.rst
+++ b/docs/src/developers_guide/contributing_pull_request_checklist.rst
@@ -16,8 +16,8 @@ is merged. Before submitting a pull request please consider this list.
 
 #. **Provide a helpful description** of the Pull Request. This should include:
 
-  * The aim of the change / the problem addressed / a link to the issue.
-  * How the change has been delivered.
+   * The aim of the change / the problem addressed / a link to the issue.
+ * How the change has been delivered. #. **Include a "What's New" entry**, if appropriate. See :ref:`whats_new_contributions`. @@ -31,10 +31,11 @@ is merged. Before submitting a pull request please consider this list. #. **Check all new dependencies added to the** `requirements/ci/`_ **yaml files.** If dependencies have been added then new nox testing lockfiles - should be generated too, see :ref:`cirrus_test_env`. + should be generated too, see :ref:`gha_test_env`. #. **Check the source documentation been updated to explain all new or changed - features**. See :ref:`docstrings`. + features**. Note, we now use numpydoc strings. Any touched code should + be updated to use the docstrings formatting. See :ref:`docstrings`. #. **Include code examples inside the docstrings where appropriate**. See :ref:`contributing.documentation.testing`. @@ -42,8 +43,6 @@ is merged. Before submitting a pull request please consider this list. #. **Check the documentation builds without warnings or errors**. See :ref:`contributing.documentation.building` -#. **Check for any new dependencies in the** `.cirrus.yml`_ **config file.** - #. **Check for any new dependencies in the** `readthedocs.yml`_ **file**. This file is used to build the documentation that is served from https://scitools-iris.readthedocs.io/en/latest/ @@ -51,12 +50,10 @@ is merged. Before submitting a pull request please consider this list. #. **Check for updates needed for supporting projects for test or example data**. For example: - * `iris-test-data`_ is a github project containing all the data to support - the tests. - * `iris-sample-data`_ is a github project containing all the data to support - the gallery and examples. - * `test-iris-imagehash`_ is a github project containing reference plot - images to support Iris :ref:`testing.graphics`. + * `iris-test-data`_ is a github project containing all the data to support + the tests. + * `iris-sample-data`_ is a github project containing all the data to support + the gallery and examples. If new files are required by tests or code examples, they must be added to the appropriate supporting project via a suitable pull-request. This pull diff --git a/docs/src/developers_guide/contributing_running_tests.rst b/docs/src/developers_guide/contributing_running_tests.rst index ab36172283..f60cedba05 100644 --- a/docs/src/developers_guide/contributing_running_tests.rst +++ b/docs/src/developers_guide/contributing_running_tests.rst @@ -5,13 +5,22 @@ Running the Tests ***************** -Using setuptools for Testing Iris -================================= +There are two options for running the tests: -.. warning:: The `setuptools`_ ``test`` command was deprecated in `v41.5.0`_. See :ref:`using nox`. +* Use an environment you created yourself. This requires more manual steps to + set up, but gives you more flexibility. For example, you can run a subset of + the tests or use ``python`` interactively to investigate any issues. See + :ref:`test manual env`. -A prerequisite of running the tests is to have the Python environment -setup. For more information on this see :ref:`installing_from_source`. +* Use ``nox``. This will automatically generate an environment and run test + sessions consistent with our GitHub continuous integration. See :ref:`using nox`. + +.. _test manual env: + +Testing Iris in a Manually Created Environment +============================================== + +To create a suitable environment for running the tests, see :ref:`installing_from_source`. 
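+
+Once your environment is ready, ``pytest`` lets you run a single test module
+or filter tests by name, rather than the whole suite. For example (the module
+path and keyword here are purely illustrative)::
+
+    pytest lib/iris/tests/unit/cube
+    pytest lib/iris/tests -k "regrid"
+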
Many Iris tests will use data that may be defined in the test itself, however this is not always the case as sometimes example files may be used. Due to @@ -32,81 +41,76 @@ The example command below uses ``~/projects`` as the parent directory:: git clone git@github.com:SciTools/iris-test-data.git export OVERRIDE_TEST_DATA_REPOSITORY=~/projects/iris-test-data/test_data -All the Iris tests may be run from the root ``iris`` project directory via:: +All the Iris tests may be run from the root ``iris`` project directory using +``pytest``. For example:: - python setup.py test - -You can also run a specific test, the example below runs the tests for -mapping:: + pytest -n 2 - cd lib/iris/tests - python test_mapping.py +will run the tests across two processes. For more options, use the command +``pytest -h``. Below is a trimmed example of the output:: -When running the test directly as above you can view the command line options -using the commands ``python test_mapping.py -h`` or -``python test_mapping.py --help``. + ============================= test session starts ============================== + platform linux -- Python 3.10.5, pytest-7.1.2, pluggy-1.0.0 + rootdir: /path/to/git/clone/iris, configfile: pyproject.toml, testpaths: lib/iris + plugins: xdist-2.5.0, forked-1.4.0 + gw0 I / gw1 I + gw0 [6361] / gw1 [6361] -.. tip:: A useful command line option to use is ``-d``. This will display - matplotlib_ figures as the tests are run. For example:: - - python test_mapping.py -d - - You can also use the ``-d`` command line option when running all - the tests but this will take a while to run and will require the - manual closing of each of the figures for the tests to continue. - -The output from running the tests is verbose as it will run ~5000 separate -tests. Below is a trimmed example of the output:: - - running test - Running test suite(s): default - - Running test discovery on iris.tests with 2 processors. - test_circular_subset (iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid.TestAreaWeightedRegrid) ... ok - test_cross_section (iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid.TestAreaWeightedRegrid) ... ok - test_different_cs (iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid.TestAreaWeightedRegrid) ... ok - ... + ........................................................................ [ 1%] + ........................................................................ [ 2%] + ........................................................................ [ 3%] ... - test_ellipsoid (iris.tests.unit.experimental.raster.test_export_geotiff.TestProjection) ... SKIP: Test requires 'gdal'. - test_no_ellipsoid (iris.tests.unit.experimental.raster.test_export_geotiff.TestProjection) ... SKIP: Test requires 'gdal'. + .......................ssssssssssssssssss............................... [ 99%] + ........................ [100%] + =============================== warnings summary =============================== ... + -- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html + =========================== short test summary info ============================ + SKIPPED [1] lib/iris/tests/experimental/test_raster.py:152: Test requires 'gdal'. + SKIPPED [1] lib/iris/tests/experimental/test_raster.py:155: Test requires 'gdal'. ... - test_slice (iris.tests.test_util.TestAsCompatibleShape) ... ok - test_slice_and_transpose (iris.tests.test_util.TestAsCompatibleShape) ... 
ok
-    test_transpose (iris.tests.test_util.TestAsCompatibleShape) ... ok
-
-    ----------------------------------------------------------------------
-    Ran 4762 tests in 238.649s
-
-    OK (SKIP=22)
+    ========= 6340 passed, 21 skipped, 1659 warnings in 193.57s (0:03:13) ==========
 
 There may be some tests that have been **skipped**. This is due to a Python
 decorator being present in the test script that will intentionally skip a test
 if a certain condition is not met. In the example output above there are
-**22** skipped tests, at the point in time when this was run this was primarily
-due to an experimental dependency not being present.
-
+**21** skipped tests. At the point in time when this was run this was due to an
+experimental dependency not being present.
 
 .. tip::
 
    The most common reason for tests to be skipped is when the directory for the
    ``iris-test-data`` has not been set which would shows output such as::
 
-       test_coord_coord_map (iris.tests.test_plot.Test1dScatter) ... SKIP: Test(s) require external data.
-       test_coord_coord (iris.tests.test_plot.Test1dScatter) ... SKIP: Test(s) require external data.
-       test_coord_cube (iris.tests.test_plot.Test1dScatter) ... SKIP: Test(s) require external data.
-
+       SKIPPED [1] lib/iris/tests/unit/fileformats/test_rules.py:157: Test(s) require external data.
+       SKIPPED [1] lib/iris/tests/unit/fileformats/pp/test__interpret_field.py:97: Test(s) require external data.
+       SKIPPED [1] lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py:29: Test(s) require external data.
+
    All Python decorators that skip tests will be defined in
    ``lib/iris/tests/__init__.py`` with a function name with a prefix of
    ``skip_``.
 
+You can also run a specific test module. The example below runs the tests for
+mapping::
+
+    cd lib/iris/tests
+    python test_mapping.py
+
+When running the test directly as above you can view the command line options
+using the commands ``python test_mapping.py -h`` or
+``python test_mapping.py --help``.
+
+.. tip:: A useful command line option to use is ``-d``. This will display
+         matplotlib_ figures as the tests are run. For example::
+
+             python test_mapping.py -d
 
 .. _using nox:
 
 Using Nox for Testing Iris
 ==========================
 
-Iris has adopted the use of the `nox`_ tool for automated testing on `cirrus-ci`_
+The `nox`_ tool has been adopted for automated testing on `Iris GitHub Actions`_
 and also locally on the command-line for developers.
 
 `nox`_ is similar to `tox`_, but instead leverages the expressiveness and power of a Python
@@ -124,15 +128,12 @@ automates the process of:
 
 * building the documentation and executing the doc-tests
 * building the documentation gallery
 * running the documentation URL link check
-* linting the code-base
-* ensuring the code-base style conforms to the `black`_ standard
-
 You can perform all of these tasks manually yourself, however the onus is
 on you to first ensure that all of the required package dependencies are
 installed and available in the testing environment.
 
 `Nox`_ has been configured to automatically do this for you, and provides a
 means to easily replicate
-the remote testing behaviour of `cirrus-ci`_ locally for the developer.
+the remote testing behaviour of `Iris GitHub Actions`_ locally for the developer.
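+
+As a brief illustration (the session name shown is an example only; the
+sessions actually available are defined by the project ``noxfile.py``), a
+typical local `nox`_ workflow might look like::
+
+    nox --list
+    nox --session tests
+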
Installing Nox
diff --git a/docs/src/developers_guide/contributing_testing.rst b/docs/src/developers_guide/contributing_testing.rst
index d0c96834a9..a65bcebd55 100644
--- a/docs/src/developers_guide/contributing_testing.rst
+++ b/docs/src/developers_guide/contributing_testing.rst
@@ -8,8 +8,8 @@ Test Categories
 
 There are two main categories of tests within Iris:
 
-  - :ref:`testing.unit_test`
-  - :ref:`testing.integration`
+- :ref:`testing.unit_test`
+- :ref:`testing.integration`
 
 Ideally, all code changes should be accompanied by one or more unit
 tests, and by zero or more integration tests.
diff --git a/docs/src/developers_guide/contributing_testing_index.rst b/docs/src/developers_guide/contributing_testing_index.rst
index c5cf1b997b..2f5ae411e8 100644
--- a/docs/src/developers_guide/contributing_testing_index.rst
+++ b/docs/src/developers_guide/contributing_testing_index.rst
@@ -7,7 +7,8 @@ Testing
    :maxdepth: 3
 
    contributing_testing
+   testing_tools
    contributing_graphics_tests
-   imagehash_index
   contributing_running_tests
   contributing_ci_tests
+   contributing_benchmarks
diff --git a/docs/src/developers_guide/documenting/docstrings.rst b/docs/src/developers_guide/documenting/docstrings.rst
index 8a06024ee2..eeefc71e40 100644
--- a/docs/src/developers_guide/documenting/docstrings.rst
+++ b/docs/src/developers_guide/documenting/docstrings.rst
@@ -8,10 +8,10 @@ Every public object in the Iris package should have an appropriate docstring.
 This is important as the docstrings are used by developers to understand
 the code and may be read directly in the source or via the :ref:`Iris`.
 
-This document has been influenced by the following PEP's,
-
-  * Attribute Docstrings :pep:`224`
-  * Docstring Conventions :pep:`257`
+.. note::
+   As of April 2022 we are looking to adopt `numpydoc`_ strings as standard.
+   We aim to complete the adoption over time as we make changes to the codebase.
+   For examples of use see `numpydoc`_ and `sphinxcontrib-napoleon`_.
 
 For consistency always use:
 
@@ -20,91 +20,14 @@ For consistency always use:
   docstrings.
 * ``u"""Unicode triple-quoted string"""`` for Unicode docstrings
 
-All docstrings should be written in reST (reStructuredText) markup. See the
-:ref:`reST_quick_start` for more detail.
-
-There are two forms of docstrings: **single-line** and **multi-line**
-docstrings.
-
-
-Single-Line Docstrings
-======================
-
-The single line docstring of an object must state the **purpose** of that
-object, known as the **purpose section**. This terse overview must be on one
-line and ideally no longer than 80 characters.
-
-
-Multi-Line Docstrings
-=====================
-
-Multi-line docstrings must consist of at least a purpose section akin to the
-single-line docstring, followed by a blank line and then any other content, as
-described below. The entire docstring should be indented to the same level as
-the quotes at the docstring's first line.
-
-
-Description
------------
-
-The multi-line docstring *description section* should expand on what was
-stated in the one line *purpose section*. The description section should try
-not to document *argument* and *keyword argument* details. Such information
-should be documented in the following *arguments and keywords section*.
-
-
-Sample Multi-Line Docstring
----------------------------
-
-Here is a simple example of a standard docstring:
-
-.. literalinclude:: docstrings_sample_routine.py
-
-This would be rendered as:
-
-    .. currentmodule:: documenting.docstrings_sample_routine
-
-    .. 
automodule:: documenting.docstrings_sample_routine
-        :members:
-        :undoc-members:
-
-Additionally, a summary can be extracted automatically, which would result in:
-
-    .. autosummary::
-
-       documenting.docstrings_sample_routine.sample_routine
-
-
-Documenting Classes
-===================
-
-The class constructor should be documented in the docstring for its
-``__init__`` or ``__new__`` method. Methods should be documented by their own
-docstring, not in the class header itself.
-
-If a class subclasses another class and its behaviour is mostly inherited from
-that class, its docstring should mention this and summarise the differences.
-Use the verb "override" to indicate that a subclass method replaces a
-superclass method and does not call the superclass method; use the verb
-"extend" to indicate that a subclass method calls the superclass method
-(in addition to its own behaviour).
-
-
-Attribute and Property Docstrings
----------------------------------
-
-Here is a simple example of a class containing an attribute docstring and a
-property docstring:
-
-.. literalinclude:: docstrings_attribute.py
+All docstrings can use reST (reStructuredText) markup to augment the
+rendered formatting. See the :ref:`reST_quick_start` for more detail.
 
-This would be rendered as:
+For more information including examples please see:
 
-    .. currentmodule:: documenting.docstrings_attribute
+* `numpydoc`_
+* `sphinxcontrib-napoleon`_
 
-    .. automodule:: documenting.docstrings_attribute
-        :members:
-        :undoc-members:
 
-.. note:: The purpose section of the property docstring **must** state whether
-          the property is read-only.
+.. _numpydoc: https://numpydoc.readthedocs.io/en/latest/format.html#style-guide
+.. _sphinxcontrib-napoleon: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_numpy.html
\ No newline at end of file
diff --git a/docs/src/developers_guide/documenting/rest_guide.rst b/docs/src/developers_guide/documenting/rest_guide.rst
index 4845132b15..c4330b1e63 100644
--- a/docs/src/developers_guide/documenting/rest_guide.rst
+++ b/docs/src/developers_guide/documenting/rest_guide.rst
@@ -14,8 +14,8 @@ reST is a lightweight markup language intended to be highly readable in
 source format. This guide will cover some of the more frequently used advanced
 reST markup syntaxes, for the basics of reST the following links may be useful:
 
-  * https://www.sphinx-doc.org/en/master/usage/restructuredtext/
-  * http://packages.python.org/an_example_pypi_project/sphinx.html
+* https://www.sphinx-doc.org/en/master/usage/restructuredtext/
+* http://packages.python.org/an_example_pypi_project/sphinx.html
 
 Reference documentation for reST can be found at http://docutils.sourceforge.net/rst.html.
diff --git a/docs/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst
index 576fc5f6a6..aa19722a69 100644
--- a/docs/src/developers_guide/documenting/whats_new_contributions.rst
+++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst
@@ -1,24 +1,21 @@
+.. include:: ../../common_links.inc
+
 .. _whats_new_contributions:
 
 =================================
 Contributing a "What's New" Entry
 =================================
 
-Iris uses a file named ``dev.rst`` to keep a draft of upcoming development changes
-that will form the next stable release. Contributions to the :ref:`iris_whatsnew`
-document are written by the developer most familiar with the change made.
-
-The contribution should be included as part of the Iris Pull Request that
-introduces the change.
+Iris uses a file named ``latest.rst`` to keep a draft of upcoming development
+changes that will form the next stable release. Contributions to the
+:ref:`iris_whatsnew` document are written by the developer most familiar
+with the change made. The contribution should be included as part of
+the Iris Pull Request that introduces the change.
 
-The ``dev.rst`` and the past release notes are kept in the
+The ``latest.rst`` and the past release notes are kept in the
 ``docs/src/whatsnew/`` directory. If you are writing the first contribution after
-an Iris release: **create the new** ``dev.rst`` by copying the content from
-``dev.rst.template`` in the same directory.
-
-.. note::
-
-    Ensure that the symbolic link ``latest.rst`` references the ``dev.rst`` file
-    within the ``docs/src/whatsnew`` directory.
+an Iris release: **create the new** ``latest.rst`` by copying the content from
+``latest.rst.template`` in the same directory.
 
 Since the `Contribution categories`_ include Internal changes, **all** Iris
 Pull Requests should be accompanied by a "What's New" contribution.
@@ -27,7 +24,7 @@ Pull Requests should be accompanied by a "What's New" contribution.
 Git Conflicts
 =============
 
-If changes to ``dev.rst`` are being suggested in several simultaneous
+If changes to ``latest.rst`` are being suggested in several simultaneous
 Iris Pull Requests, Git will likely encounter merge conflicts. If this
 situation is thought likely (large PR, high repo activity etc.):
 
@@ -38,17 +35,17 @@ situation is thought likely (large PR, high repo activity etc.):
   a **new pull request** be created specifically for the "What's New" entry,
   which references the main pull request and titled (e.g. for PR#9999):
 
-      What's New for #9999
+    What's New for #9999
 
 * PR author: create the "What's New" pull request
 
 * PR reviewer: once the "What's New" PR is created, **merge the main PR**.
-  (this will fix any `cirrus-ci`_ linkcheck errors where the links in the
+  (this will fix any `Iris GitHub Actions`_ linkcheck errors where the links in the
   "What's New" PR reference new features introduced in the main PR)
 
 * PR reviewer: review the "What's New" PR, merge once acceptable
 
-These measures should mean the suggested ``dev.rst`` changes are outstanding
+These measures should mean the suggested ``latest.rst`` changes are outstanding
 for the minimum time, minimising conflicts and minimising the need to rebase or
 merge from trunk.
 
@@ -74,6 +71,9 @@ The required content, in order, is as follows:
   user name. Link the name to their GitHub profile. E.g.
  ```@tkknight <https://github.com/tkknight>`_ changed...``
 
+  * Bigger changes take a lot of effort to review, too! Make sure you credit
+    the reviewer(s) where appropriate.
+
 * The new/changed behaviour
 
 * Context to the change. Possible examples include: what this fixes, why
@@ -87,8 +87,9 @@ The required content, in order, is as follows:
 
 For example::
 
-    #. `@tkknight <https://github.com/tkknight>`_ changed changed argument ``x``
-       to be optional in :class:`~iris.module.class` and
+    #. `@tkknight <https://github.com/tkknight>`_ and
+       `@trexfeathers <https://github.com/trexfeathers>`_ (reviewer) changed
+       argument ``x`` to be optional in :class:`~iris.module.class` and
       :meth:`iris.module.method`. This allows greater flexibility as
       requested in :issue:`9999`. (:pull:`1111`, :pull:`9999`)
 
@@ -98,13 +99,11 @@ links to code. For more inspiration on possible content and references, please
 examine past what's :ref:`iris_whatsnew` entries.
 
.. 
note:: The reStructuredText syntax will be checked as part of building - the documentation. Any warnings should be corrected. - `cirrus-ci`_ will automatically build the documentation when + the documentation. Any warnings should be corrected. The + `Iris GitHub Actions`_ will automatically build the documentation when creating a pull request, however you can also manually :ref:`build ` the documentation. -.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris - Contribution Categories ======================= diff --git a/docs/src/developers_guide/github_app.rst b/docs/src/developers_guide/github_app.rst new file mode 100644 index 0000000000..402cfe0c75 --- /dev/null +++ b/docs/src/developers_guide/github_app.rst @@ -0,0 +1,281 @@ +.. include:: ../common_links.inc + +Token GitHub App +---------------- + +.. note:: + + This section of the documentation is applicable only to GitHub `SciTools`_ + Organisation **owners** and **administrators**. + +.. note:: + + The ``iris-actions`` GitHub App has been rebranded with the more generic + name ``scitools-ci``, as the app can be used for any `SciTools`_ repository, + not just ``iris`` specifically. + + All of the following instructions are still applicable. + + +This section describes how to create, configure, install and use our `SciTools`_ +GitHub App for generating tokens for use with *GitHub Actions* (GHA). + + +Background +^^^^^^^^^^ + +Our GitHub *Continuous Integration* (CI) workflows require fully reproducible +`conda`_ environments to test ``iris`` and build our documentation. + +The ``iris`` `refresh-lockfiles`_ GHA workflow uses the `conda-lock`_ package to routinely +generate a platform specific ``lockfile`` containing all the package dependencies +required by ``iris`` for a specific version of ``python``. + +The environment lockfiles created by the `refresh-lockfiles`_ GHA are contributed +back to ``iris`` though a pull-request that is automatically generated using the +third-party `create-pull-request`_ GHA. By default, pull-requests created by such an +action using the standard ``GITHUB_TOKEN`` **cannot** trigger other workflows, such +as our CI. + +As a result, we use a dedicated authentication **GitHub App** to securely generate tokens +for the `create-pull-request`_ GHA, which then permits our full suite of CI testing workflows +to be triggered against the lockfiles pull-request. Ensuring that the CI is triggered gives us +confidence that the proposed new lockfiles have not introduced a package level incompatibility +or issue within ``iris``. See :ref:`use gha`. + + +Create GitHub App +^^^^^^^^^^^^^^^^^ + +The **GitHub App** is created for the sole purpose of generating tokens for use with actions, +and **must** be owned by the `SciTools`_ organisation. + +To create a minimal `GitHub App`_ for this purpose, perform the following steps: + +1. Click the `SciTools`_ organisation ``⚙️ Settings`` option. + +.. figure:: assets/scitools-settings.png + :alt: SciTools organisation Settings option + :align: center + :width: 75% + +2. Click the ``GitHub Apps`` option from the ``<> Developer settings`` + section in the left hand sidebar. + +.. figure:: assets/developer-settings-github-apps.png + :alt: Developer settings, GitHub Apps option + :align: center + :width: 25% + +3. Now click the ``New GitHub App`` button to display the ``Register new GitHub App`` + form. + +Within the ``Register new GitHub App`` form, complete the following fields: + +4. Set the **mandatory** ``GitHub App name`` field to be ``iris-actions``. +5. 
Set the **mandatory** ``Homepage URL`` field to be ``https://github.com/SciTools/iris``.
+6. Under the ``Webhook`` section, **uncheck** the ``Active`` checkbox.
+   Note that, **no** ``Webhook URL`` is required.
+
+.. figure:: assets/webhook-active.png
+   :alt: Webhook active checkbox
+   :align: center
+   :width: 75%
+
+7. Under the ``Repository permissions`` section, set the ``Contents`` field to
+   be ``Access: Read and write``.
+
+.. figure:: assets/repo-perms-contents.png
+   :alt: Repository permissions Contents option
+   :align: center
+   :width: 75%
+
+8. Under the ``Repository permissions`` section, set the ``Pull requests`` field
+   to be ``Access: Read and write``.
+
+.. figure:: assets/repo-perms-pull-requests.png
+   :alt: Repository permissions Pull requests option
+   :align: center
+   :width: 75%
+
+9. Under the ``Organization permissions`` section, set the ``Members`` field to
+   be ``Access: Read-only``.
+
+.. figure:: assets/org-perms-members.png
+   :alt: Organization permissions Members
+   :align: center
+   :width: 75%
+
+10. Under the ``User permissions`` section, for the ``Where can this GitHub App be installed?``
+    field, **check** the ``Only on this account`` radio-button i.e., only allow
+    this GitHub App to be installed on the **SciTools** account.
+
+.. figure:: assets/user-perms.png
+   :alt: User permissions
+   :align: center
+   :width: 75%
+
+11. Finally, click the ``Create GitHub App`` button.
+
+
+Configure GitHub App
+^^^^^^^^^^^^^^^^^^^^
+
+Creating the GitHub App will automatically redirect you to the ``SciTools settings / iris-actions``
+form for the newly created app.
+
+Perform the following GitHub App configuration steps:
+
+.. _app id:
+
+1. Under the ``About`` section, make a note of the GitHub ``App ID`` as this
+   value is required later. See :ref:`gha secrets`.
+2. Under the ``Display information`` section, optionally upload the ``iris`` logo
+   as a ``png`` image.
+3. Under the ``Private keys`` section, click the ``Generate a private key`` button.
+
+.. figure:: assets/generate-key.png
+   :alt: Private keys Generate a private key
+   :align: center
+   :width: 75%
+
+.. _private key:
+
+GitHub will automatically generate a private key to sign access token requests
+for the app. Also a separate browser pop-up window will appear with the GitHub
+App private key in ``OpenSSL PEM`` format.
+
+.. figure:: assets/download-pem.png
+   :alt: Download OpenSSL PEM file
+   :align: center
+   :width: 50%
+
+.. important::
+
+   Please ensure that you save the ``OpenSSL PEM`` file and **securely** archive
+   its contents. The private key within this file is required later.
+   See :ref:`gha secrets`.
+
+
+Install GitHub App
+^^^^^^^^^^^^^^^^^^
+
+To install the GitHub App:
+
+1. Select the ``Install App`` option from the top left menu of the
+   ``SciTools settings / iris-actions`` form, then click the ``Install`` button.
+
+.. figure:: assets/install-app.png
+   :alt: Install App option
+   :align: center
+   :width: 75%
+
+2. Select the ``Only select repositories`` radio-button from the ``Install iris-actions``
+   form, and choose the ``SciTools/iris`` repository.
+
+.. figure:: assets/install-iris-actions.png
+   :alt: Install iris-actions GitHub App
+   :align: center
+   :width: 75%
+
+3. Click the ``Install`` button.
+
+   The successfully installed ``iris-actions`` GitHub App is now available under
+   the ``GitHub Apps`` option in the ``Integrations`` section of the `SciTools`_
+   organisation ``Settings``. Note that, to reconfigure the installed app click
+   the ``⚙️ App settings`` option.
+
+.. 
figure:: assets/installed-app.png + :alt: Installed GitHub App + :align: center + :width: 80% + +4. Finally, confirm that the ``iris-actions`` GitHub App is now available within + the `SciTools/iris`_ repository by clicking the ``GitHub apps`` option in the + ``⚙️ Settings`` section. + +.. figure:: assets/iris-github-apps.png + :alt: Iris installed GitHub App + :align: center + :width: 80% + + +.. _gha secrets: + +Create Repository Secrets +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The GitHub Action that requests an access token from the ``iris-actions`` +GitHub App must be configured with the following information: + +* the ``App ID``, and +* the ``OpenSSL PEM`` private key + +associated with the ``iris-actions`` GitHub App. This **sensitive** information is +made **securely** available by creating `SciTools/iris`_ repository secrets: + +1. Click the `SciTools/iris`_ repository ``⚙️ Settings`` option. + +.. figure:: assets/iris-settings.png + :alt: Iris Settings + :align: center + :width: 75% + +2. Click the ``Actions`` option from the ``Security`` section in the left hand + sidebar. + +.. figure:: assets/iris-security-actions.png + :alt: Iris Settings Security Actions + :align: center + :width: 25% + +3. Click the ``New repository secret`` button. + +.. figure:: assets/iris-actions-secret.png + :alt: Iris Actions Secret + :align: center + :width: 75% + +4. Complete the ``Actions secrets / New secret`` form for the ``App ID``: + + * Set the ``Name`` field to be ``AUTH_APP_ID``. + * Set the ``Value`` field to be the numerical ``iris-actions`` GitHub ``App ID``. + See :ref:`here `. + * Click the ``Add secret`` button. + +5. Click the ``New repository secret`` button again, and complete the form + for the ``OpenSSL PEM``: + + * Set the ``Name`` field to be ``AUTH_APP_PRIVATE_KEY``. + * Set the ``Value`` field to be the entire contents of the ``OpenSSL PEM`` file. + See :ref:`here `. + * Click the ``Add secret`` button. + +A summary of the newly created `SciTools/iris`_ repository secrets is now available: + +.. figure:: assets/iris-secrets-created.png + :alt: Iris Secrets created + :align: center + :width: 75% + + +.. _use gha: + +Use GitHub App +^^^^^^^^^^^^^^ + +The following example workflow shows how to use the `github-app-token`_ GHA +to generate a token for use with the `create-pull-request`_ GHA: + +.. figure:: assets/gha-token-example.png + :alt: GitHub Action token example + :align: center + :width: 50% + + +.. _GitHub App: https://docs.github.com/en/developers/apps/building-github-apps/creating-a-github-app +.. _SciTools/iris: https://github.com/SciTools/iris +.. _conda-lock: https://github.com/conda-incubator/conda-lock +.. _create-pull-request: https://github.com/peter-evans/create-pull-request +.. _github-app-token: https://github.com/tibdex/github-app-token +.. _refresh-lockfiles: https://github.com/SciTools/iris/blob/main/.github/workflows/refresh-lockfiles.yml diff --git a/docs/src/developers_guide/gitwash/development_workflow.rst b/docs/src/developers_guide/gitwash/development_workflow.rst index 0536ebfb62..b086922d5b 100644 --- a/docs/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/src/developers_guide/gitwash/development_workflow.rst @@ -25,7 +25,7 @@ In what follows we'll refer to the upstream iris ``main`` branch, as * If you can possibly avoid it, avoid merging trunk or any other branches into your feature branch while you are working. 
* If you do find yourself merging from trunk, consider :ref:`rebase-on-trunk` -* Ask on the `Iris GitHub Discussions`_ if you get stuck. +* Ask on the Iris `GitHub Discussions`_ if you get stuck. * Ask for code review! This way of working helps to keep work well organized, with readable history. @@ -157,7 +157,7 @@ Ask for Your Changes to be Reviewed or Merged When you are ready to ask for someone to review your code and consider a merge: #. Go to the URL of your forked repo, say - ``http://github.com/your-user-name/iris``. + ``https://github.com/your-user-name/iris``. #. Use the 'Switch Branches' dropdown menu near the top left of the page to select the branch with your changes: @@ -190,7 +190,7 @@ Delete a Branch on Github git push origin :my-unwanted-branch Note the colon ``:`` before ``test-branch``. See also: -http://github.com/guides/remove-a-remote-branch +https://github.com/guides/remove-a-remote-branch Several People Sharing a Single Repository @@ -203,7 +203,7 @@ share it via github. First fork iris into your account, as from :ref:`forking`. Then, go to your forked repository github page, say -``http://github.com/your-user-name/iris``, select :guilabel:`Settings`, +``https://github.com/your-user-name/iris``, select :guilabel:`Settings`, :guilabel:`Manage Access` and then :guilabel:`Invite collaborator`. .. note:: For more information on sharing your repository see the diff --git a/docs/src/developers_guide/gitwash/forking.rst b/docs/src/developers_guide/gitwash/forking.rst index 161847ed79..247e3cf678 100644 --- a/docs/src/developers_guide/gitwash/forking.rst +++ b/docs/src/developers_guide/gitwash/forking.rst @@ -7,7 +7,7 @@ Making Your own Copy (fork) of Iris =================================== You need to do this only once. The instructions here are very similar -to the instructions at http://help.github.com/forking/, please see +to the instructions at https://help.github.com/forking/, please see that page for more detail. We're repeating some of it here just to give the specifics for the `Iris`_ project, and to suggest some default names. diff --git a/docs/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc index 9a87b55d4d..11d037ccf4 100644 --- a/docs/src/developers_guide/gitwash/git_links.inc +++ b/docs/src/developers_guide/gitwash/git_links.inc @@ -9,8 +9,8 @@ nipy, NIPY, Nipy, etc... .. _git: http://git-scm.com/ -.. _github: http://github.com -.. _github help: http://help.github.com +.. _github: https://github.com +.. _github help: https://help.github.com .. _git documentation: https://git-scm.com/docs .. _git clone: http://schacon.github.com/git/git-clone.html diff --git a/docs/src/developers_guide/imagehash_index.rst b/docs/src/developers_guide/imagehash_index.rst deleted file mode 100644 index a11ae8a531..0000000000 --- a/docs/src/developers_guide/imagehash_index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. include:: ../common_links.inc - -.. _testing.imagehash_index: - -Graphical Test Hash Index -************************* - -The iris test suite produces plots of data using matplotlib and cartopy. -The images produced are compared to known "good" output, the images for -which are kept in `scitools/test-iris-imagehash `_. - -For an overview of iris' graphics tests, see :ref:`testing.graphics` - -Typically running the iris test suite will output the rendered -images to ``$PROJECT_DIR/iris_image_test_output``. -The known good output for each test can be seen at the links below -for comparison. - - -.. 
imagetest-list:: 
\ No newline at end of file
diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst
index f4d44781fc..25a426e20b 100644
--- a/docs/src/developers_guide/release.rst
+++ b/docs/src/developers_guide/release.rst
@@ -19,7 +19,8 @@ A Release Manager will be nominated for each release of Iris. This role involves
 
 * deciding which features and bug fixes should be included in the release
 * managing the project board for the release
-* using a `GitHub Releases Discussion Forum`_ for documenting intent and capturing any
+* using :discussion:`GitHub Discussion releases category `
+  for documenting intent and capturing any
  discussion about the release
 
 The Release Manager will make the release, ensuring that all the steps outlined
@@ -99,12 +100,14 @@ Steps to achieve this can be found in the :ref:`iris_development_releases_steps`
 The Release
 -----------
 
-The final steps of the release are to change the version string ``__version__``
-in the source of :literal:`iris.__init__.py` and ensure the release date and details
+The final steps of the release are to ensure that the release date and details
 are correct in the relevant ``whatsnew`` page within the documentation.
 
-Once all checks are complete, the release is cut by the creation of a new tag
-in the ``SciTools/iris`` repository.
+There is no need to update the ``iris.__version__``, as this is managed
+automatically by `setuptools-scm`_.
+
+Once all checks are complete, the release is published on GitHub by
+creating a new tag in the ``SciTools/iris`` repository.
 
 
 Update conda-forge
@@ -120,6 +123,14 @@ conda package on the `conda-forge Anaconda channel`_.
 Update PyPI
 -----------
 
+.. note::
+
+   As part of our Continuous-Integration (CI), the building and publishing of
+   PyPI artifacts is now automated by a dedicated GitHub Action.
+
+   The following instructions **no longer** need to be performed manually,
+   but remain part of the documentation for reference purposes only.
+
 Update the `scitools-iris`_ project on PyPI with the latest Iris release.
 
 To do this perform the following steps.
@@ -178,14 +189,14 @@ For further details on how to test Iris, see :ref:`developer_running_tests`.
 Merge Back
 ----------
 
-After the release is cut, the changes from the release branch should be merged
+After the release is published, the changes from the release branch should be merged
 back onto the ``SciTools/iris`` ``main`` branch.
 
 To achieve this, first cut a local branch from the latest ``main`` branch,
 and `git merge` the :literal:`.x` release branch into it. Ensure that the
-``iris.__version__``, ``docs/src/whatsnew/index.rst``, ``docs/src/whatsnew/dev.rst``,
-and ``docs/src/whatsnew/latest.rst`` are correct, before committing these changes
-and then proposing a pull-request on the ``main`` branch of ``SciTools/iris``.
+``docs/src/whatsnew/index.rst`` and ``docs/src/whatsnew/latest.rst`` are
+correct, before committing these changes and then proposing a pull-request
+on the ``main`` branch of ``SciTools/iris``.
 
 
 Point Releases
@@ -218,24 +229,24 @@ Release Steps
 #. Update the ``iris.__init__.py`` version string e.g., to ``1.9.0``
 #. Update the ``whatsnew`` for the release:
 
-   * Use ``git`` to rename ``docs/src/whatsnew/dev.rst`` to the release
-     version file ``v1.9.rst``
-   * Update the symbolic link ``latest.rst`` to reference the latest
-     whatsnew ``v1.9.rst``
-   * Use ``git`` to delete the ``docs/src/whatsnew/dev.rst.template`` file
-   * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title.
-     Note that, the Iris version and release date are updated automatically
-     when the documentation is built
-   * Review the file for correctness
-   * Work with the development team to populate the ``Release Highlights``
-     dropdown at the top of the file, which provides extra detail on notable
-     changes
-   * Use ``git`` to add and commit all changes, including removal of
-     ``dev.rst.template`` and update to the ``latest.rst`` symbolic link.
+   * Use ``git`` to rename ``docs/src/whatsnew/latest.rst`` to the release
+     version file ``v1.9.rst``
+   * Update ``docs/src/whatsnew/index.rst`` to rename ``latest.rst`` in the
+     include statement and toctree.
+   * Use ``git`` to delete the ``docs/src/whatsnew/latest.rst.template`` file
+   * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title.
+     Note that, the Iris version and release date are updated automatically
+     when the documentation is built
+   * Review the file for correctness
+   * Work with the development team to populate the ``Release Highlights``
+     dropdown at the top of the file, which provides extra detail on notable
+     changes
+   * Use ``git`` to add and commit all changes, including removal of
+     ``latest.rst.template``.
 
 #. Update the ``whatsnew`` index ``docs/src/whatsnew/index.rst``
 
-   * Remove the reference to ``dev.rst``
+   * Remove the reference to ``latest.rst``
   * Add a reference to ``v1.9.rst`` to the top of the list
 
 #. Check your changes by building the documentation and reviewing
@@ -256,12 +267,14 @@ Post Release Steps
   `Read The Docs`_ to ensure that the appropriate versions are ``Active``
   and/or ``Hidden``. To do this ``Edit`` the appropriate version e.g.,
   see `Editing v3.0.0rc0`_ (must be logged into Read the Docs).
+#. Make a new ``latest.rst`` from ``latest.rst.template`` and update the include
+   statement and the toctree in ``index.rst`` to point at the new
+   ``latest.rst``.
 #. Merge back to ``main``
 
 
 .. _SciTools/iris: https://github.com/SciTools/iris
 .. _tag on the SciTools/Iris: https://github.com/SciTools/iris/releases
-.. _GitHub Releases Discussion Forum: https://github.com/SciTools/iris/discussions/categories/releases
 .. _conda-forge Anaconda channel: https://anaconda.org/conda-forge/iris
 .. _conda-forge iris-feedstock: https://github.com/conda-forge/iris-feedstock
 .. _CFEP-05: https://github.com/conda-forge/cfep/blob/master/cfep-05.md
@@ -271,4 +284,5 @@ Post Release Steps
 .. _rc_iris: https://anaconda.org/conda-forge/iris/labels
 .. _Generating Distribution Archives: https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives
 .. _Packaging Your Project: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project
-.. _latest CF standard names: http://cfconventions.org/standard-names.html
\ No newline at end of file
+.. _latest CF standard names: http://cfconventions.org/standard-names.html
+.. _setuptools-scm: https://github.com/pypa/setuptools_scm
\ No newline at end of file
diff --git a/docs/src/developers_guide/testing_tools.rst b/docs/src/developers_guide/testing_tools.rst
new file mode 100755
index 0000000000..dd628d37fc
--- /dev/null
+++ b/docs/src/developers_guide/testing_tools.rst
@@ -0,0 +1,80 @@
+.. include:: ../common_links.inc
+
+.. _testing_tools:
+
+Testing tools
+*************
+
+Iris has various internal convenience functions and utilities available to
+support writing tests. Using these makes tests quicker and easier to write, and
+also consistent with the rest of Iris (which makes it easier to work with the
+code).
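+
+As a minimal sketch (the test class, method and data here are invented purely
+for illustration), a test case built on these conveniences and one of the
+custom assertions described below might look like::
+
+    import numpy as np
+
+    import iris.tests as tests
+
+
+    class TestExample(tests.IrisTest):
+        def test_doubling(self):
+            # Compare a computed array against the expected result using
+            # one of the custom array assertions.
+            result = np.array([1.0, 2.0]) * 2
+            self.assertArrayEqual(result, np.array([2.0, 4.0]))
+
+
+    if __name__ == "__main__":
+        # The usual Iris pattern: run this module directly as a script.
+        tests.main()
+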
Most of these conveniences are accessed through the +:class:`iris.tests.IrisTest` class, from +which Iris' test classes then inherit. + +.. tip:: + + All functions listed on this page are defined within + :mod:`iris.tests.__init__.py` as methods of + :class:`iris.tests.IrisTest_nometa` (which :class:`iris.tests.IrisTest` + inherits from). They can be accessed within a test using + ``self.exampleFunction``. + +Custom assertions +================= + +:class:`iris.tests.IrisTest` supports a variety of custom unittest-style +assertions, such as :meth:`~iris.tests.IrisTest_nometa.assertArrayEqual` and +:meth:`~iris.tests.IrisTest_nometa.assertArrayAlmostEqual`. + +.. _create-missing: + +Saving results +-------------- + +Some tests compare the generated output to the expected result contained in a +file. Custom assertions for this include +:meth:`~iris.tests.IrisTest_nometa.assertCMLApproxData`, +:meth:`~iris.tests.IrisTest_nometa.assertCDL`, +:meth:`~iris.tests.IrisTest_nometa.assertCML` and +:meth:`~iris.tests.IrisTest_nometa.assertTextFile`. See docstrings for more +information. + +.. note:: + + Sometimes code changes alter the results expected from a test containing the + above methods. These can be updated by removing the existing result files + and then running the file containing the test with a ``--create-missing`` + command line argument, or setting the ``IRIS_TEST_CREATE_MISSING`` + environment variable to anything non-zero. This will create the files rather + than erroring, allowing you to commit the updated results. + +Context managers +================ + +Capturing exceptions and logging +-------------------------------- + +:class:`iris.tests.IrisTest` includes several context managers that can be used +to make test code tidier and easier to read. These include +:meth:`~iris.tests.IrisTest_nometa.assertWarnsRegexp` and +:meth:`~iris.tests.IrisTest_nometa.assertLogs`. + +Temporary files +--------------- + +It's also possible to generate temporary files in a concise fashion with +:meth:`~iris.tests.IrisTest_nometa.temp_filename`. + +Patching +======== + +:meth:`~iris.tests.IrisTest_nometa.patch` is a wrapper around +``unittest.mock.patch`` that will be automatically cleaned up at the end of +the test. + +Graphic tests +============= + +As a package capable of generating graphical outputs, Iris has utilities for +creating and updating graphical tests - see :ref:`testing.graphics` for more +information. \ No newline at end of file diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst deleted file mode 100644 index 81bff2f764..0000000000 --- a/docs/src/further_topics/index.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. _further topics: - -Introduction -============ - -Some specific areas of Iris may require further explanation or a deep dive -into additional detail above and beyond that offered by the -:ref:`User Guide `. - -This section provides a collection of additional material on focused topics -that may be of interest to the more advanced or curious user. - -.. hint:: - - If you wish further documentation on any specific topics or areas of Iris - that are missing, then please let us know by raising a :issue:`GitHub Documentation Issue` - on `SciTools/Iris`_. - - -* :doc:`metadata` -* :doc:`lenient_metadata` -* :doc:`lenient_maths` -* :ref:`ugrid` - - ..
_SciTools/iris: https://github.com/SciTools/iris diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 1b81f7055c..de1afb15af 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -1,3 +1,4 @@ +.. _further topics: .. _metadata: Metadata @@ -63,25 +64,26 @@ For example, the collective metadata used to define an ``var_name``, ``units``, and ``attributes`` members. Note that, these are the actual `data attribute`_ names of the metadata members on the Iris class. + .. _metadata members table: -.. table:: - Iris classes that model `CF Conventions`_ metadata +.. table:: Iris classes that model `CF Conventions`_ metadata :widths: auto :align: center - =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== - Metadata Members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` Metadata Members - =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== - ``standard_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``standard_name`` - ``long_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``long_name`` - ``var_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``var_name`` - ``units`` ✔ ✔ ✔ ✔ ✔ ✔ ``units`` - ``attributes`` ✔ ✔ ✔ ✔ ✔ ✔ ``attributes`` - ``coord_system`` ✔ ✔ ✔ ``coord_system`` - ``climatological`` ✔ ✔ ✔ ``climatological`` - ``measure`` ✔ ``measure`` - ``cell_methods`` ✔ ``cell_methods`` - ``circular`` ✔ ``circular`` - =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== + =================== ======================================= ============================== ========================================== ================================= ======================== ============================== + Metadata Members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` + =================== ======================================= ============================== ========================================== ================================= ======================== ============================== + ``standard_name`` ✔ ✔ ✔ ✔ ✔ ✔ + ``long_name`` ✔ ✔ ✔ ✔ ✔ ✔ + ``var_name`` ✔ ✔ ✔ ✔ ✔ ✔ + ``units`` ✔ ✔ ✔ ✔ ✔ ✔ + ``attributes`` ✔ ✔ ✔ ✔ ✔ ✔ + ``coord_system`` ✔ ✔ ✔ + ``climatological`` ✔ ✔ ✔ + ``measure`` ✔ + ``cell_methods`` ✔ + ``circular`` ✔ + =================== ======================================= ============================== ========================================== ================================= ======================== ============================== .. note:: diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst index 4a2f64f627..cc3cc7b793 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -52,7 +52,7 @@ example. .. 
_data_structured_grid: .. figure:: images/data_structured_grid.svg :alt: Diagram of how data is represented on a structured grid - :align: right + :align: left :width: 1280 Data on a structured grid. @@ -131,7 +131,7 @@ example of what is described above. .. _data_ugrid_mesh: .. figure:: images/data_ugrid_mesh.svg :alt: Diagram of how data is represented on an unstructured mesh - :align: right + :align: left :width: 1280 Data on an unstructured mesh @@ -157,7 +157,7 @@ elements. See :numref:`ugrid_element_centres` for a visualised example. .. _ugrid_element_centres: .. figure:: images/ugrid_element_centres.svg :alt: Diagram demonstrating mesh face-centred data. - :align: right + :align: left :width: 1280 Data can be assigned to mesh edge/face/volume 'centres' @@ -180,7 +180,7 @@ Every node is completely independent - every one can have unique X andY (and Z) .. _ugrid_node_independence: .. figure:: images/ugrid_node_independence.svg :alt: Diagram demonstrating the independence of each mesh node - :align: right + :align: left :width: 300 Every mesh node is completely independent @@ -199,7 +199,7 @@ array. See :numref:`ugrid_variable_faces`. .. _ugrid_variable_faces: .. figure:: images/ugrid_variable_faces.svg :alt: Diagram demonstrating mesh faces with variable node counts - :align: right + :align: left :width: 300 Mesh faces can have different node counts (using masking) @@ -216,7 +216,7 @@ areas (faces). See :numref:`ugrid_edge_data`. .. _ugrid_edge_data: .. figure:: images/ugrid_edge_data.svg :alt: Diagram demonstrating data assigned to mesh edges - :align: right + :align: left :width: 300 Data can be assigned to mesh edges @@ -405,6 +405,9 @@ the :class:`~iris.cube.Cube`\'s unstructured dimension. Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location edge >>> print(edge_cube.location) edge diff --git a/docs/src/further_topics/ugrid/images/fesom_mesh.png b/docs/src/further_topics/ugrid/images/fesom_mesh.png new file mode 100644 index 0000000000..283899a94b Binary files /dev/null and b/docs/src/further_topics/ugrid/images/fesom_mesh.png differ diff --git a/docs/src/further_topics/ugrid/images/smc_mesh.png b/docs/src/further_topics/ugrid/images/smc_mesh.png new file mode 100644 index 0000000000..8c5a9d86eb Binary files /dev/null and b/docs/src/further_topics/ugrid/images/smc_mesh.png differ diff --git a/docs/src/further_topics/ugrid/index.rst b/docs/src/further_topics/ugrid/index.rst index 81ba24428a..c45fd271a2 100644 --- a/docs/src/further_topics/ugrid/index.rst +++ b/docs/src/further_topics/ugrid/index.rst @@ -38,6 +38,7 @@ Read on to find out more... * :doc:`data_model` - learn why the mesh experience is so different. * :doc:`partner_packages` - meet some optional dependencies that provide powerful mesh operations. * :doc:`operations` - experience how your workflows will look when written for mesh data. +* :doc:`other_meshes` - check out some examples of converting various mesh formats into Iris' mesh format. .. Need an actual TOC to get Sphinx working properly, but have hidden it in @@ -50,5 +51,6 @@ Read on to find out more... 
data_model partner_packages operations + other_meshes __ CF-UGRID_ diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst index f96e3e406c..a4e0e593d7 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/further_topics/ugrid/operations.rst @@ -189,6 +189,9 @@ Creating a :class:`~iris.cube.Cube` is unchanged; the Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location edge Save @@ -392,6 +395,9 @@ etcetera: Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location face Attributes: Conventions 'CF-1.7' @@ -620,6 +626,9 @@ the link between :class:`~iris.cube.Cube` and Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location edge # Sub-setted MeshCoords have become AuxCoords. >>> print(edge_cube[:-1]) @@ -976,13 +985,26 @@ on dimensions other than the :meth:`~iris.cube.Cube.mesh_dim`, since such Arithmetic ---------- -.. |tagline: arithmetic| replace:: |pending| +.. |tagline: arithmetic| replace:: |unchanged| .. rubric:: |tagline: arithmetic| -:class:`~iris.cube.Cube` Arithmetic (described in :doc:`/userguide/cube_maths`) -has not yet been adapted to handle :class:`~iris.cube.Cube`\s that include -:class:`~iris.experimental.ugrid.MeshCoord`\s. +Cube Arithmetic (described in :doc:`/userguide/cube_maths`) +has been extended to handle :class:`~iris.cube.Cube`\s that include +:class:`~iris.experimental.ugrid.MeshCoord`\s, and hence have a ``cube.mesh``. + +Cubes with meshes can be combined in arithmetic operations just like +"ordinary" cubes. They can be combined with other cubes that lack the mesh +(and its dimension), or with cubes whose mesh matches, though possibly on a +different dimension. +Arithmetic can also be performed between a cube with a mesh and a mesh +coordinate with a matching mesh. + +In all cases, the result will have the same mesh as the input cubes. + +Meshes only match if they are fully equal -- i.e. they contain all the same +coordinates and connectivities, with identical names, units, attributes and +data content. .. todo: diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/further_topics/ugrid/other_meshes.rst new file mode 100644 index 0000000000..e6f477624e --- /dev/null +++ b/docs/src/further_topics/ugrid/other_meshes.rst @@ -0,0 +1,225 @@ +.. _other_meshes: + +Converting Other Mesh Formats +***************************** + +Iris' Mesh Data Model is based primarily on the CF-UGRID conventions (see +:doc:`data_model`), but other mesh formats can be converted to fit into this +model, **enabling use of Iris' specialised mesh support**. Below are some +examples demonstrating how this works for various mesh formats. + +.. contents:: + :local: + +`FESOM 1.4`_ Voronoi Polygons +----------------------------- +.. figure:: images/fesom_mesh.png + :width: 300 + :alt: Sample of FESOM mesh Voronoi polygons, with variable numbers of sides. + +A FESOM mesh encoded in a NetCDF file includes: + +* X+Y point coordinates +* X+Y corner coordinates of the Voronoi Polygons around these points - + represented as the bounds of the coordinates + +To represent the Voronoi Polygons as faces, the corner coordinates will be used +as the **nodes** when creating the Iris +:class:`~iris.experimental.ugrid.mesh.Mesh`. + +.. dropdown:: :opticon:`code` + + ..
code-block:: python + + >>> import iris + >>> from iris.experimental.ugrid import Mesh + + + >>> temperature_cube = iris.load_cube("my_file.nc", "sea_surface_temperature") + >>> print(temperature_cube) + sea_surface_temperature / (degC) (time: 12; -- : 126859) + Dimension coordinates: + time x - + Auxiliary coordinates: + latitude - x + longitude - x + Cell methods: + mean where sea area + mean time + Attributes: + grid 'FESOM 1.4 (unstructured grid in the horizontal with 126859 wet nodes;... + ... + + >>> print(temperature_cube.coord("longitude")) + AuxCoord : longitude / (degrees) + points: + bounds: + shape: (126859,) bounds(126859, 18) + dtype: float64 + standard_name: 'longitude' + var_name: 'lon' + + # Use a Mesh to represent the Cube's horizontal geography, by replacing + # the existing face AuxCoords with new MeshCoords. + >>> fesom_mesh = Mesh.from_coords(temperature_cube.coord('longitude'), + ... temperature_cube.coord('latitude')) + >>> for new_coord in fesom_mesh.to_MeshCoords("face"): + ... old_coord = temperature_cube.coord(new_coord.name()) + ... unstructured_dim, = old_coord.cube_dims(temperature_cube) + ... temperature_cube.remove_coord(old_coord) + ... temperature_cube.add_aux_coord(new_coord, unstructured_dim) + + >>> print(temperature_cube) + sea_surface_temperature / (degC) (time: 12; -- : 126859) + Dimension coordinates: + time x - + Mesh coordinates: + latitude - x + longitude - x + Cell methods: + mean where sea area + mean time + Attributes: + grid 'FESOM 1.4 (unstructured grid in the horizontal with 126859 wet nodes;... + ... + + >>> print(temperature_cube.mesh) + Mesh : 'unknown' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + shape(2283462,)> + shape(2283462,)> + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: shape(126859, 18)> + face coordinates + shape(126859,)> + shape(126859,)> + +`WAVEWATCH III`_ Spherical Multi-Cell (SMC) WAVE Quad Grid +---------------------------------------------------------- +.. figure:: images/smc_mesh.png + :width: 300 + :alt: Sample of an SMC mesh, with decreasing quad sizes at the coastlines. + +An SMC grid encoded in a NetCDF file includes: + +* X+Y face centre coordinates +* X+Y base face sizes +* X+Y face size factors + +From this information we can derive face corner coordinates, which will be used +as the **nodes** when creating the Iris +:class:`~iris.experimental.ugrid.mesh.Mesh`. + + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> import iris + >>> from iris.experimental.ugrid import Mesh + >>> import numpy as np + + + >>> wave_cube = iris.load_cube("my_file.nc", "sea_surface_wave_significant_height") + >>> print(wave_cube) + sea_surface_wave_significant_height / (m) (time: 7; -- : 666328) + Dimension coordinates: + time x - + Auxiliary coordinates: + forecast_period x - + latitude - x + latitude cell size factor - x + longitude - x + longitude cell size factor - x + Scalar coordinates: + forecast_reference_time 2021-12-05 00:00:00 + Attributes: + SIN4 namelist parameter BETAMAX 1.39 + SMC_grid_type 'seapoint' + WAVEWATCH_III_switches 'NOGRB SHRD PR2 UNO SMC FLX0 LN1 ST4 NL1 BT1 DB1 TR0 BS0 IC0 IS0 REF0 WNT1... + WAVEWATCH_III_version_number '7.13' + altitude_resolution 'n/a' + area 'Global wave model GS512L4EUK' + base_lat_size 0.029296871 + base_lon_size 0.043945305 + ... 
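+
+    # (Orientation note, added for clarity: the steps below first pull out
+    # the face-centre coordinates and the cell-size information, then derive
+    # the face corner positions from them as centre +/- half the cell size.)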
+ + >>> faces_x = wave_cube.coord("longitude") + >>> faces_y = wave_cube.coord("latitude") + >>> face_size_factor_x = wave_cube.coord("longitude cell size factor") + >>> face_size_factor_y = wave_cube.coord("latitude cell size factor") + >>> base_x_size = wave_cube.attributes["base_lon_size"] + >>> base_y_size = wave_cube.attributes["base_lat_size"] + + # Calculate face corners from face centres and face size factors. + >>> face_centres_x = faces_x.points + >>> face_centres_y = faces_y.points + >>> face_size_x = face_size_factor_x.points * base_x_size + >>> face_size_y = face_size_factor_y.points * base_y_size + + >>> x_mins = (face_centres_x - 0.5 * face_size_x).reshape(-1, 1) + >>> x_maxs = (face_centres_x + 0.5 * face_size_x).reshape(-1, 1) + >>> y_mins = (face_centres_y - 0.5 * face_size_y).reshape(-1, 1) + >>> y_maxs = (face_centres_y + 0.5 * face_size_y).reshape(-1, 1) + + >>> face_corners_x = np.hstack([x_mins, x_maxs, x_maxs, x_mins]) + >>> face_corners_y = np.hstack([y_mins, y_mins, y_maxs, y_maxs]) + + # Add face corners as coordinate bounds. + >>> faces_x.bounds = face_corners_x + >>> faces_y.bounds = face_corners_y + + # Use a Mesh to represent the Cube's horizontal geography, by replacing + # the existing face AuxCoords with new MeshCoords. + >>> smc_mesh = Mesh.from_coords(faces_x, faces_y) + >>> for new_coord in smc_mesh.to_MeshCoords("face"): + ... old_coord = wave_cube.coord(new_coord.name()) + ... unstructured_dim, = old_coord.cube_dims(wave_cube) + ... wave_cube.remove_coord(old_coord) + ... wave_cube.add_aux_coord(new_coord, unstructured_dim) + + >>> print(wave_cube) + sea_surface_wave_significant_height / (m) (time: 7; -- : 666328) + Dimension coordinates: + time x - + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + forecast_period x - + latitude cell size factor - x + longitude cell size factor - x + Scalar coordinates: + forecast_reference_time 2021-12-05 00:00:00 + Attributes: + SIN4 namelist parameter BETAMAX 1.39 + SMC_grid_type 'seapoint' + WAVEWATCH_III_switches 'NOGRB SHRD PR2 UNO SMC FLX0 LN1 ST4 NL1 BT1 DB1 TR0 BS0 IC0 IS0 REF0 WNT1... + WAVEWATCH_III_version_number '7.13' + altitude_resolution 'n/a' + area 'Global wave model GS512L4EUK' + base_lat_size 0.029296871 + base_lon_size 0.043945305 + ... + + >>> print(wave_cube.mesh) + Mesh : 'unknown' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + +.. _WAVEWATCH III: https://github.com/NOAA-EMC/WW3 +.. _FESOM 1.4: https://fesom.de/models/fesom14/ diff --git a/docs/src/getting_started.rst b/docs/src/getting_started.rst new file mode 100644 index 0000000000..24299a4060 --- /dev/null +++ b/docs/src/getting_started.rst @@ -0,0 +1,15 @@ +.. _getting_started_index: + +Getting Started +=============== + +To get started with Iris we recommend reading :ref:`why_iris` was created, +then :ref:`installing_iris` Iris, and exploring the examples in the +:ref:`gallery_index`. + +.. toctree:: + :maxdepth: 1 + + why_iris + installing + generated/gallery/index \ No newline at end of file diff --git a/docs/src/index.rst b/docs/src/index.rst index e6a787a220..b9f7faaa03 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -1,7 +1,9 @@ +.. include:: common_links.inc ..
_iris_docs: -Iris |version| -======================== + +Iris +==== **A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data.** @@ -11,157 +13,137 @@ giving you a powerful, format-agnostic interface for working with your data. It excels when working with multi-dimensional Earth Science data, where tabular representations become unwieldy and inefficient. -`CF Standard names `_, -`units `_, and coordinate metadata -are built into Iris, giving you a rich and expressive interface for maintaining -an accurate representation of your data. Its treatment of data and -associated metadata as first-class objects includes: - -* visualisation interface based on `matplotlib `_ and - `cartopy `_, -* unit conversion, -* subsetting and extraction, -* merge and concatenate, -* aggregations and reductions (including min, max, mean and weighted averages), -* interpolation and regridding (including nearest-neighbor, linear and - area-weighted), and -* operator overloads (``+``, ``-``, ``*``, ``/``, etc.). - -A number of file formats are recognised by Iris, including CF-compliant NetCDF, -GRIB, and PP, and it has a plugin architecture to allow other formats to be -added seamlessly. - -Building upon `NumPy `_ and -`dask `_, Iris scales from efficient -single-machine workflows right through to multi-core clusters and HPC. -Interoperability with packages from the wider scientific Python ecosystem comes -from Iris' use of standard NumPy/dask arrays as its underlying data storage. - -Iris is part of SciTools, for more information see https://scitools.org.uk/. -For **Iris 2.4** and earlier documentation please see the -:link-badge:`https://scitools.org.uk/iris/docs/v2.4.0/,"legacy documentation",cls=badge-info text-white`. - +For more information see :ref:`why_iris`. .. panels:: :container: container-lg pb-3 - :column: col-lg-4 col-md-4 col-sm-6 col-xs-12 p-2 + :column: col-lg-4 col-md-4 col-sm-6 col-xs-12 p-2 text-center + :img-top-cls: w-50 m-auto px-1 py-2 - Install Iris as a user or developer. - +++ - .. link-button:: installing_iris - :type: ref - :text: Installing Iris - :classes: btn-outline-primary btn-block --- - Example code to create a variety of plots. + :img-top: _static/icon_shuttle.svg + + Information on Iris, how to install and a gallery of examples that + create plots. +++ - .. link-button:: sphx_glr_generated_gallery + .. link-button:: getting_started :type: ref - :text: Gallery - :classes: btn-outline-primary btn-block + :text: Getting Started + :classes: btn-outline-info btn-block + + --- - Find out what has recently changed in Iris. + :img-top: _static/icon_instructions.svg + + Learn how to use Iris, including loading, navigating, saving, + plotting and more. +++ - .. link-button:: iris_whatsnew + .. link-button:: user_guide_index :type: ref - :text: What's New - :classes: btn-outline-primary btn-block + :text: User Guide + :classes: btn-outline-info btn-block + --- - Learn how to use Iris. + :img-top: _static/icon_development.svg + + As a developer you can contribute to Iris. +++ - .. link-button:: user_guide_index + .. link-button:: development_where_to_start :type: ref - :text: User Guide - :classes: btn-outline-primary btn-block + :text: Developers Guide + :classes: btn-outline-info btn-block + --- + :img-top: _static/icon_api.svg + Browse full Iris functionality by module. +++ .. 
link-button:: Iris :type: ref :text: Iris API - :classes: btn-outline-primary btn-block + :classes: btn-outline-info btn-block + --- - As a developer you can contribute to Iris. + :img-top: _static/icon_new_product.svg + + Find out what has recently changed in Iris. +++ - .. link-button:: development_where_to_start + .. link-button:: iris_whatsnew + :type: ref + :text: What's New + :classes: btn-outline-info btn-block + + --- + :img-top: _static/icon_thumb.png + + Raise the profile of issues by voting on them. + +++ + .. link-button:: voted_issues_top :type: ref - :text: Getting Involved - :classes: btn-outline-primary btn-block + :text: Voted Issues + :classes: btn-outline-info btn-block -.. toctree:: - :maxdepth: 1 - :caption: Getting Started - :hidden: +Icons made by `FreePik `_ from +`Flaticon `_ + + +Support +~~~~~~~ + +We, the Iris developers, have adopted `GitHub Discussions`_ to capture any +discussions or support questions related to Iris. + +See also `StackOverflow for "How Do I? `_, +which may be useful, but note that we do not actively monitor this. + +The legacy support resources: - installing - generated/gallery/index +* `Users Google Group `_ +* `Developers Google Group `_ +* `Legacy Documentation`_ (Iris 2.4 or earlier) .. toctree:: + :caption: Getting Started :maxdepth: 1 :hidden: - whatsnew/latest - Archive + getting_started .. toctree:: - :maxdepth: 1 :caption: User Guide + :maxdepth: 1 :name: userguide_index :hidden: userguide/index - userguide/iris_cubes - userguide/loading_iris_cubes - userguide/saving_iris_cubes - userguide/navigating_a_cube - userguide/subsetting_a_cube - userguide/real_and_lazy_data - userguide/plotting_a_cube - userguide/interpolation_and_regridding - userguide/merge_and_concat - userguide/cube_statistics - userguide/cube_maths - userguide/citation - userguide/code_maintenance - - -.. _developers_guide: + .. toctree:: + :caption: Developers Guide :maxdepth: 1 - :caption: Further Topics + :name: developers_index :hidden: - further_topics/index - further_topics/metadata - further_topics/lenient_metadata - further_topics/lenient_maths - further_topics/ugrid/index + developers_guide/contributing_getting_involved .. toctree:: - :maxdepth: 2 - :caption: Developers Guide - :name: development_index + :caption: Iris API + :maxdepth: 1 :hidden: - developers_guide/contributing_getting_involved - developers_guide/gitwash/index - developers_guide/contributing_documentation - developers_guide/contributing_codebase_index - developers_guide/contributing_changes - developers_guide/release + generated/api/iris .. toctree:: + :caption: What's New in Iris :maxdepth: 1 - :caption: Reference + :name: whats_new_index :hidden: - generated/api/iris - techpapers/index - copyright + whatsnew/index + +.. todolist:: \ No newline at end of file diff --git a/docs/src/installing.rst b/docs/src/installing.rst index 37a8942ab3..6a2d2f6131 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -1,7 +1,7 @@ .. _installing_iris: -Installing Iris -=============== +Installing ========== Iris is available using conda for the following platforms: @@ -119,9 +119,9 @@ Running the Tests To ensure your setup is configured correctly you can run the test suite using the command:: - python setup.py test + pytest -For more information see :ref:`developer_running_tests`. +For more information see :ref:`test manual env`.
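+
+If you only want to exercise part of the test suite, ``pytest`` can also be
+pointed at a specific directory or module, for example (the path here is
+purely illustrative)::
+
+    pytest lib/iris/tests/unit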
Custom Site Configuration diff --git a/docs/src/sphinxext/image_test_output.py b/docs/src/sphinxext/image_test_output.py deleted file mode 100644 index 9e492a5be9..0000000000 --- a/docs/src/sphinxext/image_test_output.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -import json -import re -from typing import Dict, List - -from docutils import nodes -from sphinx.application import Sphinx -from sphinx.util.docutils import SphinxDirective - -ImageRepo = Dict[str, List[str]] - -HASH_MATCH = re.compile(r"([^\/]+)\.png$") - - -def hash_from_url(url: str) -> str: - match = HASH_MATCH.search(url) - if not match: - raise ValueError(f"url {url} does not match form `http...hash.png`") - else: - return match.groups()[0] - - -class ImageTestDirective(SphinxDirective): - def run(self): - with open(self.config["image_test_json"], "r") as fh: - imagerepo = json.load(fh) - enum_list = nodes.enumerated_list() - nodelist = [] - nodelist.append(enum_list) - for test in sorted(imagerepo): - link_node = nodes.raw( - "", - f'{test}', - format="html", - ) - li_node = nodes.list_item("") - li_node += link_node - enum_list += li_node - return nodelist - - -def collect_imagehash_pages(app: Sphinx): - """Generate pages for each entry in the imagerepo.json""" - with open(app.config["image_test_json"], "r") as fh: - imagerepo: ImageRepo = json.load(fh) - pages = [] - for test, hashfiles in imagerepo.items(): - hashstrs = [hash_from_url(h) for h in hashfiles] - pages.append( - ( - f"generated/image_test/{test}", - {"test": test, "hashfiles": zip(hashstrs, hashfiles)}, - "imagehash.html", - ) - ) - return pages - - -def setup(app: Sphinx): - app.add_config_value( - "image_test_json", - "../../lib/iris/tests/results/imagerepo.json", - "html", - ) - - app.add_directive("imagetest-list", ImageTestDirective) - app.connect("html-collect-pages", collect_imagehash_pages) - - return { - "version": "0.1", - "parallel_read_safe": True, - "parallel_write_safe": True, - } diff --git a/docs/src/techpapers/um_files_loading.rst b/docs/src/techpapers/um_files_loading.rst index 72d34962ce..f8c94cab08 100644 --- a/docs/src/techpapers/um_files_loading.rst +++ b/docs/src/techpapers/um_files_loading.rst @@ -350,7 +350,7 @@ information is contained in the :attr:`~iris.coords.Coord.units` property. always 1st Jan 1970 (times before this are represented as negative values). The units.calendar property of time coordinates is set from the lowest decimal -digit of LBTIM, known as LBTIM.IC. Note that the non-gregorian calendars (e.g. +digit of LBTIM, known as LBTIM.IC. Note that the non-standard calendars (e.g. 360-day 'model' calendar) are defined in CF, not udunits. There are a number of different time encoding methods used in UM data, but the diff --git a/docs/src/userguide/code_maintenance.rst b/docs/src/userguide/code_maintenance.rst index b2b498bc80..c01c1975a7 100644 --- a/docs/src/userguide/code_maintenance.rst +++ b/docs/src/userguide/code_maintenance.rst @@ -12,17 +12,17 @@ In practice, as Iris develops, most users will want to periodically upgrade their installed version to access new features or at least bug fixes. This is obvious if you are still developing other code that uses Iris, or using -code from other sources. +code from other sources. 
However, even if you have only legacy code that remains untouched, some code maintenance effort is probably still necessary: - * On the one hand, *in principle*, working code will go on working, as long - as you don't change anything else. +* On the one hand, *in principle*, working code will go on working, as long + as you don't change anything else. - * However, such "version stasis" can easily become a growing burden, if you - are simply waiting until an update becomes unavoidable, often that will - eventually occur when you need to update some other software component, - for some completely unconnected reason. +* However, such "version stasis" can easily become a growing burden if you + are simply waiting until an update becomes unavoidable. Often that will + eventually occur when you need to update some other software component, + for some completely unconnected reason. Principles of Change Management @@ -35,13 +35,13 @@ In Iris, however, we aim to reduce code maintenance problems to an absolute minimum by following defined change management rules. These ensure that, *within a major release number* : - * you can be confident that your code will still work with subsequent minor - releases +* you can be confident that your code will still work with subsequent minor + releases - * you will be aware of future incompatibility problems in advance +* you will be aware of future incompatibility problems in advance - * you can defer making code compatibility changes for some time, until it - suits you +* you can defer making code compatibility changes for some time, until it + suits you The above applies to minor version upgrades : e.g. code that works with version "1.4.2" should still work with a subsequent minor release such as "1.5.0" or diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst index e8a1744a44..fe9a5d63d2 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/userguide/cube_maths.rst @@ -38,7 +38,7 @@ Let's load some air temperature which runs from 1860 to 2100:: air_temp = iris.load_cube(filename, 'air_temperature') We can now get the first and last time slices using indexing -(see :ref:`subsetting_a_cube` for a reminder):: +(see :ref:`cube_indexing` for a reminder):: t_first = air_temp[0, :, :] t_last = air_temp[-1, :, :] diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index 2a3b32fe11..08923e7662 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -1,31 +1,47 @@ .. _user_guide_index: .. _user_guide_introduction: -Introduction -============ +User Guide +========== -If you are reading this user guide for the first time it is strongly recommended that you read the user guide -fully before experimenting with your own data files. +If you are reading this user guide for the first time it is strongly +recommended that you read the user guide fully before experimenting with your +own data files. - -Much of the content has supplementary links to the reference documentation; you will not need to follow these -links in order to understand the guide but they may serve as a useful reference for future exploration. +Much of the content has supplementary links to the reference documentation; +you will not need to follow these links in order to understand the guide but +they may serve as a useful reference for future exploration. .. only:: html - Since later pages depend on earlier ones, try reading this user guide sequentially using the ``next`` and ``previous`` links.
- - -* :doc:`iris_cubes` -* :doc:`loading_iris_cubes` -* :doc:`saving_iris_cubes` -* :doc:`navigating_a_cube` -* :doc:`subsetting_a_cube` -* :doc:`real_and_lazy_data` -* :doc:`plotting_a_cube` -* :doc:`interpolation_and_regridding` -* :doc:`merge_and_concat` -* :doc:`cube_statistics` -* :doc:`cube_maths` -* :doc:`citation` -* :doc:`code_maintenance` + Since later pages depend on earlier ones, try reading this user guide + sequentially using the ``next`` and ``previous`` links at the bottom + of each page. + + +.. toctree:: + :maxdepth: 2 + + iris_cubes + loading_iris_cubes + saving_iris_cubes + navigating_a_cube + subsetting_a_cube + real_and_lazy_data + plotting_a_cube + interpolation_and_regridding + merge_and_concat + cube_statistics + cube_maths + citation + code_maintenance + + +.. toctree:: + :maxdepth: 2 + :caption: Further Topics + + ../further_topics/metadata + ../further_topics/lenient_metadata + ../further_topics/lenient_maths + ../further_topics/ugrid/index diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index f590485606..deae4427ed 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -19,14 +19,14 @@ In Iris we refer to the available types of interpolation and regridding as `schemes`. The following are the interpolation schemes that are currently available in Iris: - * linear interpolation (:class:`iris.analysis.Linear`), and - * nearest-neighbour interpolation (:class:`iris.analysis.Nearest`). +* linear interpolation (:class:`iris.analysis.Linear`), and +* nearest-neighbour interpolation (:class:`iris.analysis.Nearest`). The following are the regridding schemes that are currently available in Iris: - * linear regridding (:class:`iris.analysis.Linear`), - * nearest-neighbour regridding (:class:`iris.analysis.Nearest`), and - * area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative). +* linear regridding (:class:`iris.analysis.Linear`), +* nearest-neighbour regridding (:class:`iris.analysis.Nearest`), and +* area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative). The linear, nearest-neighbor, and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data, the resulting cube @@ -42,8 +42,8 @@ Interpolation Interpolating a cube is achieved with the :meth:`~iris.cube.Cube.interpolate` method. This method expects two arguments: - #. the sample points to interpolate, and - #. the interpolation scheme to use. +#. the sample points to interpolate, and +#. the interpolation scheme to use. The result is a new cube, interpolated at the sample points. @@ -51,9 +51,9 @@ Sample points must be defined as an iterable of ``(coord, value(s))`` pairs. The `coord` argument can be either a coordinate name or coordinate instance. The specified coordinate must exist on the cube being interpolated! 
For example: - * coordinate names and scalar sample points: ``[('latitude', 51.48), ('longitude', 0)]``, - * a coordinate instance and a scalar sample point: ``[(cube.coord('latitude'), 51.48)]``, and - * a coordinate name and a NumPy array of sample points: ``[('longitude', np.linspace(-11, 2, 14))]`` +* coordinate names and scalar sample points: ``[('latitude', 51.48), ('longitude', 0)]``, +* a coordinate instance and a scalar sample point: ``[(cube.coord('latitude'), 51.48)]``, and +* a coordinate name and a NumPy array of sample points: ``[('longitude', np.linspace(-11, 2, 14))]`` are all examples of valid sample points. @@ -175,11 +175,11 @@ The extrapolation mode is controlled by the ``extrapolation_mode`` keyword. For the available interpolation schemes available in Iris, the ``extrapolation_mode`` keyword must be one of: - * ``extrapolate`` -- the extrapolation points will be calculated by extending the gradient of the closest two points, - * ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate, - * ``nan`` -- the extrapolation points will be be set to NaN, - * ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray, or - * ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. +* ``extrapolate`` -- the extrapolation points will be calculated by extending the gradient of the closest two points, +* ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate, +* ``nan`` -- the extrapolation points will be set to NaN, +* ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray, or +* ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. Using an extrapolation mode is achieved by constructing an interpolation scheme with the extrapolation mode keyword set as required. The constructed scheme @@ -206,8 +206,8 @@ intensive part of an interpolation is setting up the interpolator. To cache an interpolator you must set up an interpolator scheme and call the scheme's interpolator method. The interpolator method takes as arguments: - #. a cube to be interpolated, and - #. an iterable of coordinate names or coordinate instances of the coordinates that are to be interpolated over. +#. a cube to be interpolated, and +#. an iterable of coordinate names or coordinate instances of the coordinates that are to be interpolated over. For example: @@ -244,8 +244,8 @@ regridding is based on the **horizontal** grid of *another cube*. Regridding a cube is achieved with the :meth:`cube.regrid() ` method. This method expects two arguments: - #. *another cube* that defines the target grid onto which the cube should be regridded, and - #. the regridding scheme to use. +#. *another cube* that defines the target grid onto which the cube should be regridded, and +#. the regridding scheme to use. .. note:: @@ -278,15 +278,15 @@ mode when defining the regridding scheme. For the available regridding schemes in Iris, the ``extrapolation_mode`` keyword must be one of: - * ``extrapolate`` -- - * for :class:`~iris.analysis.Linear` the extrapolation points will be calculated by extending the gradient of the closest two points. - * for :class:`~iris.analysis.Nearest` the extrapolation points will take their value from the nearest source point.
+ + * for :class:`~iris.analysis.Linear` the extrapolation points will be calculated by extending the gradient of the closest two points. + * for :class:`~iris.analysis.Nearest` the extrapolation points will take their value from the nearest source point. - * ``nan`` -- the extrapolation points will be be set to NaN. - * ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate. - * ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray. - * ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. +* ``nan`` -- the extrapolation points will be set to NaN. +* ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate. +* ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray. +* ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. The ``rotated_psl`` cube is defined on a limited area rotated pole grid. If we regridded the ``rotated_psl`` cube onto the global grid as defined by the ``global_air_temp`` cube @@ -395,8 +395,8 @@ intensive part of a regrid is setting up the regridder. To cache a regridder you must set up a regridder scheme and call the scheme's regridder method. The regridder method takes as arguments: - #. a cube (that is to be regridded) defining the source grid, and - #. a cube defining the target grid to regrid the source cube to. +#. a cube (that is to be regridded) defining the source grid, and +#. a cube defining the target grid to regrid the source cube to. For example: diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index d13dee369c..29d8f3cefc 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -4,82 +4,105 @@ Iris Data Structures ==================== -The top level object in Iris is called a cube. A cube contains data and metadata about a phenomenon. +The top level object in Iris is called a cube. A cube contains data and +metadata about a phenomenon. -In Iris, a cube is an interpretation of the *Climate and Forecast (CF) Metadata Conventions* whose purpose is to: +In Iris, a cube is an interpretation of the *Climate and Forecast (CF) +Metadata Conventions* whose purpose is to: - *require conforming datasets to contain sufficient metadata that they are self-describing... including physical - units if appropriate, and that each value can be located in space (relative to earth-based coordinates) and time.* +.. panels:: + :container: container-lg pb-3 + :column: col-lg-12 p-2 -Whilst the CF conventions are often mentioned alongside NetCDF, Iris implements several major format importers which can take -files of specific formats and turn them into Iris cubes. Additionally, a framework is provided which allows users -to extend Iris' import capability to cater for specialist or unimplemented formats. + *require conforming datasets to contain sufficient metadata that they are + self-describing... including physical units if appropriate, and that each + value can be located in space (relative to earth-based coordinates) and + time.* -A single cube describes one and only one phenomenon, always has a name, a unit and -an n-dimensional data array to represents the cube's phenomenon.
In order to locate the -data spatially, temporally, or in any other higher-dimensional space, a collection of *coordinates* -exist on the cube. + +Whilst the CF conventions are often mentioned alongside NetCDF, Iris implements +several major format importers which can take files of specific formats and +turn them into Iris cubes. Additionally, a framework is provided which allows +users to extend Iris' import capability to cater for specialist or +unimplemented formats. + +A single cube describes one and only one phenomenon, always has a name, a unit +and an n-dimensional data array that represents the cube's phenomenon. In order +to locate the data spatially, temporally, or in any other higher-dimensional +space, a collection of *coordinates* exists on the cube. Coordinates =========== -A coordinate is a container to store metadata about some dimension(s) of a cube's data array and therefore, -by definition, its phenomenon. - - * Each coordinate has a name and a unit. - * When a coordinate is added to a cube, the data dimensions that it represents are also provided. - - * The shape of a coordinate is always the same as the shape of the associated data dimension(s) on the cube. - * A dimension not explicitly listed signifies that the coordinate is independent of that dimension. - * Each dimension of a coordinate must be mapped to a data dimension. The only coordinates with no mapping are - scalar coordinates. - - * Depending on the underlying data that the coordinate is representing, its values may be discrete points or be - bounded to represent interval extents (e.g. temperature at *point x* **vs** rainfall accumulation *between 0000-1200 hours*). - * Coordinates have an attributes dictionary which can hold arbitrary extra metadata, excluding certain restricted CF names - * More complex coordinates may contain a coordinate system which is necessary to fully interpret the values - contained within the coordinate. - +A coordinate is a container to store metadata about some dimension(s) of a +cube's data array and therefore, by definition, its phenomenon. + +* Each coordinate has a name and a unit. +* When a coordinate is added to a cube, the data dimensions that it + represents are also provided. + + * The shape of a coordinate is always the same as the shape of the + associated data dimension(s) on the cube. + * A dimension not explicitly listed signifies that the coordinate is + independent of that dimension. + * Each dimension of a coordinate must be mapped to a data dimension. The + only coordinates with no mapping are scalar coordinates. + +* Depending on the underlying data that the coordinate is representing, its + values may be discrete points or be bounded to represent interval extents + (e.g. temperature at *point x* **vs** rainfall accumulation *between + 0000-1200 hours*). +* Coordinates have an attributes dictionary which can hold arbitrary extra + metadata, excluding certain restricted CF names. +* More complex coordinates may contain a coordinate system which is + necessary to fully interpret the values contained within the coordinate.
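+
+As a brief illustration of these properties, here is a minimal sketch of
+building a coordinate by hand (names and values are purely illustrative):
+
+.. code-block:: python
+
+    import numpy as np
+
+    import iris.coords
+
+    # A numeric, monotonic, 1-d coordinate, with a name and a unit.
+    time_coord = iris.coords.DimCoord(
+        np.array([0.0, 1.0, 2.0]),
+        standard_name="time",
+        units="hours since 1970-01-01 00:00:00",
+    )
+    # Bounds turn the discrete points into interval extents.
+    time_coord.guess_bounds()
+    # Arbitrary extra metadata lives in the attributes dictionary.
+    time_coord.attributes["note"] = "an illustrative coordinate"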
+ There are two classes of coordinates: - **DimCoord** - - * Numeric - * Monotonic - * Representative of, at most, a single data dimension (1d) +**DimCoord** + +* Numeric +* Monotonic +* Representative of, at most, a single data dimension (1d) + +**AuxCoord** + +* May be of any type, including strings +* May represent multiple data dimensions (n-dimensional) - **AuxCoord** - - * May be of any type, including strings - * May represent multiple data dimensions (n-dimensional) - Cube ==== A cube consists of: - * a standard name and/or a long name and an appropriate unit - * a data array who's values are representative of the phenomenon - * a collection of coordinates and associated data dimensions on the cube's data array, which are split into two separate lists: +* a standard name and/or a long name and an appropriate unit +* a data array whose values are representative of the phenomenon +* a collection of coordinates and associated data dimensions on the cube's + data array, which are split into two separate lists: + + * *dimension coordinates* - DimCoords which uniquely map to exactly one + data dimension, ordered by dimension. + * *auxiliary coordinates* - DimCoords or AuxCoords which map to as many + data dimensions as the coordinate has dimensions. - * *dimension coordinates* - DimCoords which uniquely map to exactly one data dimension, ordered by dimension. - * *auxiliary coordinates* - DimCoords or AuxCoords which map to as many data dimensions as the coordinate has dimensions. - - * an attributes dictionary which, other than some protected CF names, can hold arbitrary extra metadata. - * a list of cell methods to represent operations which have already been applied to the data (e.g. "mean over time") - * a list of coordinate "factories" used for deriving coordinates from the values of other coordinates in the cube +* an attributes dictionary which, other than some protected CF names, can + hold arbitrary extra metadata. +* a list of cell methods to represent operations which have already been + applied to the data (e.g. "mean over time") +* a list of coordinate "factories" used for deriving coordinates from the + values of other coordinates in the cube Cubes in Practice ----------------- - A Simple Cube Example ===================== -Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at -4 different longitudes, 2 different latitudes and 3 different heights. Our data array can be represented pictorially: +Suppose we have some gridded data which has 24 air temperature readings +(in Kelvin) which are located at 4 different longitudes, 2 different latitudes +and 3 different heights. Our data array can be represented pictorially: .. image:: multi_array.png @@ -87,61 +110,66 @@ Where dimensions 0, 1, and 2 have lengths 3, 2 and 4 respectively.
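+
+In code terms, before any metadata is attached, the bare data array could be
+as simple as this sketch (with made-up values):
+
+.. code-block:: python
+
+    import numpy as np
+
+    # 24 readings arranged as (height, latitude, longitude) = (3, 2, 4).
+    data = np.arange(24, dtype=np.float64).reshape(3, 2, 4)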
The Iris cube to represent this data would consist of: - * a standard name of ``air_temperature`` and a unit of ``kelvin`` - * a data array of shape ``(3, 2, 4)`` - * a coordinate, mapping to dimension 0, consisting of: - - * a standard name of ``height`` and unit of ``meters`` - * an array of length 3 representing the 3 ``height`` points - - * a coordinate, mapping to dimension 1, consisting of: - - * a standard name of ``latitude`` and unit of ``degrees`` - * an array of length 2 representing the 2 latitude points - * a coordinate system such that the ``latitude`` points could be fully located on the globe - - * a coordinate, mapping to dimension 2, consisting of: - - * a standard name of ``longitude`` and unit of ``degrees`` - * an array of length 4 representing the 4 longitude points - * a coordinate system such that the ``longitude`` points could be fully located on the globe - +* a standard name of ``air_temperature`` and a unit of ``kelvin`` +* a data array of shape ``(3, 2, 4)`` +* a coordinate, mapping to dimension 0, consisting of: + + * a standard name of ``height`` and unit of ``meters`` + * an array of length 3 representing the 3 ``height`` points + +* a coordinate, mapping to dimension 1, consisting of: + + * a standard name of ``latitude`` and unit of ``degrees`` + * an array of length 2 representing the 2 latitude points + * a coordinate system such that the ``latitude`` points could be fully + located on the globe -Pictorially the cube has taken on more information than a simple array: +* a coordinate, mapping to dimension 2, consisting of: + + * a standard name of ``longitude`` and unit of ``degrees`` + * an array of length 4 representing the 4 longitude points + * a coordinate system such that the ``longitude`` points could be fully + located on the globe + +Pictorially the cube has taken on more information than a simple array: .. image:: multi_array_to_cube.png -Additionally further information may be optionally attached to the cube. -For example, it is possible to attach any of the following: - - * a coordinate, not mapping to any data dimensions, consisting of: - - * a standard name of ``time`` and unit of ``days since 2000-01-01 00:00`` - * a data array of length 1 representing the time that the data array is valid for - - * an auxiliary coordinate, mapping to dimensions 1 and 2, consisting of: - - * a long name of ``place name`` and no unit - * a 2d string array of shape ``(2, 4)`` with the names of the 8 places that the lat/lons correspond to - - * an auxiliary coordinate "factory", which can derive its own mapping, consisting of: - - * a standard name of ``height`` and a unit of ``feet`` - * knowledge of how data values for this coordinate can be calculated given the ``height in meters`` coordinate - - * a cell method of "mean" over "ensemble" to indicate that the data has been meaned over - a collection of "ensembles" (i.e. multiple model runs). +Additionally further information may be optionally attached to the cube.
+For example, it is possible to attach any of the following: + +* a coordinate, not mapping to any data dimensions, consisting of: + + * a standard name of ``time`` and unit of ``days since 2000-01-01 00:00`` + * a data array of length 1 representing the time that the data array is + valid for + +* an auxiliary coordinate, mapping to dimensions 1 and 2, consisting of: + + * a long name of ``place name`` and no unit + * a 2d string array of shape ``(2, 4)`` with the names of the 8 places + that the lat/lons correspond to + +* an auxiliary coordinate "factory", which can derive its own mapping, + consisting of: + + * a standard name of ``height`` and a unit of ``feet`` + * knowledge of how data values for this coordinate can be calculated + given the ``height in meters`` coordinate + +* a cell method of "mean" over "ensemble" to indicate that the data has been + meaned over a collection of "ensembles" (i.e. multiple model runs). Printing a Cube =============== -Every Iris cube can be printed to screen as you will see later in the user guide. It is worth familiarising yourself with the -output as this is the quickest way of inspecting the contents of a cube. Here is the result of printing a real life cube: +Every Iris cube can be printed to screen as you will see later in the user +guide. It is worth familiarising yourself with the output as this is the +quickest way of inspecting the contents of a cube. Here is the result of +printing a real life cube: .. _hybrid_cube_printout: @@ -150,7 +178,7 @@ output as this is the quickest way of inspecting the contents of a cube. Here is import iris filename = iris.sample_data_path('uk_hires.pp') - # NOTE: Every time the output of this cube changes, the full list of deductions below should be re-assessed. + # NOTE: Every time the output of this cube changes, the full list of deductions below should be re-assessed. print(iris.load_cube(filename, 'air_potential_temperature')) .. testoutput:: @@ -178,16 +206,22 @@ output as this is the quickest way of inspecting the contents of a cube. Here is Using this output we can deduce that: - * The cube represents air potential temperature. - * There are 4 data dimensions, and the data has a shape of ``(3, 7, 204, 187)`` - * The 4 data dimensions are mapped to the ``time``, ``model_level_number``, - ``grid_latitude``, ``grid_longitude`` coordinates respectively - * There are three 1d auxiliary coordinates and one 2d auxiliary (``surface_altitude``) - * There is a single ``altitude`` derived coordinate, which spans 3 data dimensions - * There are 7 distinct values in the "model_level_number" coordinate. Similar inferences can - be made for the other dimension coordinates. - * There are 7, not necessarily distinct, values in the ``level_height`` coordinate. - * There is a single ``forecast_reference_time`` scalar coordinate representing the entire cube. - * The cube has one further attribute relating to the phenomenon. - In this case the originating file format, PP, encodes information in a STASH code which in some cases can - be useful for identifying advanced experiment information relating to the phenomenon. +* The cube represents air potential temperature. 
+* There are 4 data dimensions, and the data has a shape of ``(3, 7, 204, 187)`` +* The 4 data dimensions are mapped to the ``time``, ``model_level_number``, + ``grid_latitude``, ``grid_longitude`` coordinates respectively +* There are three 1d auxiliary coordinates and one 2d auxiliary + (``surface_altitude``) +* There is a single ``altitude`` derived coordinate, which spans 3 data + dimensions +* There are 7 distinct values in the "model_level_number" coordinate. Similar + inferences can be made for the other dimension coordinates. +* There are 7, not necessarily distinct, values in the ``level_height`` + coordinate. +* There is a single ``forecast_reference_time`` scalar coordinate representing + the entire cube. +* The cube has one further attribute relating to the phenomenon. + In this case the originating file format, PP, encodes information in a STASH + code which in some cases can be useful for identifying advanced experiment + information relating to the phenomenon. diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst index fb938975e8..33ad932d70 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -39,15 +39,15 @@ This shows that there were 2 cubes as a result of loading the file, they were: The ``surface_altitude`` cube was 2 dimensional with: - * the two dimensions have extents of 204 and 187 respectively and are - represented by the ``grid_latitude`` and ``grid_longitude`` coordinates. +* the two dimensions have extents of 204 and 187 respectively and are + represented by the ``grid_latitude`` and ``grid_longitude`` coordinates. The ``air_potential_temperature`` cubes were 4 dimensional with: - * the same length ``grid_latitude`` and ``grid_longitude`` dimensions as - ``surface_altitide`` - * a ``time`` dimension of length 3 - * a ``model_level_number`` dimension of length 7 +* the same length ``grid_latitude`` and ``grid_longitude`` dimensions as + ``surface_altitude`` +* a ``time`` dimension of length 3 +* a ``model_level_number`` dimension of length 7 .. note:: @@ -55,7 +55,7 @@ The ``air_potential_temperature`` cubes were 4 dimensional with: (even if it only contains one :class:`iris.cube.Cube` - see :ref:`strict-loading`). Anything that can be done with a Python :class:`list` can be done with an :class:`iris.cube.CubeList`. - + The order of this list should not be relied upon. Ways of loading a specific cube or cubes are covered in :ref:`constrained-loading` and :ref:`strict-loading`. @@ -206,241 +206,8 @@ a specific ``model_level_number``:: level_10 = iris.Constraint(model_level_number=10) cubes = iris.load(filename, level_10) -Constraints can be combined using ``&`` to represent a more restrictive -constraint to ``load``:: - - filename = iris.sample_data_path('uk_hires.pp') - forecast_6 = iris.Constraint(forecast_period=6) - level_10 = iris.Constraint(model_level_number=10) - cubes = iris.load(filename, forecast_6 & level_10) - -.. note:: - - Whilst ``&`` is supported, the ``|`` that might reasonably be expected is - not. Explanation as to why is in the :class:`iris.Constraint` reference - documentation. - - For an example of constraining to multiple ranges of the same coordinate to - generate one cube, see the :class:`iris.Constraint` reference documentation. - - To generate multiple cubes, each constrained to a different range of the - same coordinate, use :py:func:`iris.load_cubes`.
diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst
index fb938975e8..33ad932d70 100644
--- a/docs/src/userguide/loading_iris_cubes.rst
+++ b/docs/src/userguide/loading_iris_cubes.rst
@@ -39,15 +39,15 @@ This shows that there were 2 cubes as a result of loading the file, they were:
 
 The ``surface_altitude`` cube was 2 dimensional with:
 
-   * the two dimensions have extents of 204 and 187 respectively and are
-     represented by the ``grid_latitude`` and ``grid_longitude`` coordinates.
+* the two dimensions have extents of 204 and 187 respectively and are
+  represented by the ``grid_latitude`` and ``grid_longitude`` coordinates.
 
 The ``air_potential_temperature`` cubes were 4 dimensional with:
 
-   * the same length ``grid_latitude`` and ``grid_longitude`` dimensions as
-     ``surface_altitide``
-   * a ``time`` dimension of length 3
-   * a ``model_level_number`` dimension of length 7
+* the same length ``grid_latitude`` and ``grid_longitude`` dimensions as
+  ``surface_altitude``
+* a ``time`` dimension of length 3
+* a ``model_level_number`` dimension of length 7
 
 .. note::
 
@@ -55,7 +55,7 @@ The ``air_potential_temperature`` cubes were 4 dimensional with:
     (even if it only contains one :class:`iris.cube.Cube` - see
     :ref:`strict-loading`). Anything that can be done with a Python
     :class:`list` can be done with an :class:`iris.cube.CubeList`.
-
+    The order of this list should not be relied upon.
 
 Ways of loading a specific cube or cubes are covered in
 :ref:`constrained-loading` and :ref:`strict-loading`.
 
@@ -206,241 +206,8 @@ a specific ``model_level_number``::
 
     level_10 = iris.Constraint(model_level_number=10)
     cubes = iris.load(filename, level_10)
 
-Constraints can be combined using ``&`` to represent a more restrictive
-constraint to ``load``::
-
-    filename = iris.sample_data_path('uk_hires.pp')
-    forecast_6 = iris.Constraint(forecast_period=6)
-    level_10 = iris.Constraint(model_level_number=10)
-    cubes = iris.load(filename, forecast_6 & level_10)
-
-.. note::
-
-    Whilst ``&`` is supported, the ``|`` that might reasonably be expected is
-    not. Explanation as to why is in the :class:`iris.Constraint` reference
-    documentation.
-
-    For an example of constraining to multiple ranges of the same coordinate to
-    generate one cube, see the :class:`iris.Constraint` reference documentation.
-
-    To generate multiple cubes, each constrained to a different range of the
-    same coordinate, use :py:func:`iris.load_cubes`.
-
-As well as being able to combine constraints using ``&``,
-the :class:`iris.Constraint` class can accept multiple arguments,
-and a list of values can be given to constrain a coordinate to one of
-a collection of values::
-
-    filename = iris.sample_data_path('uk_hires.pp')
-    level_10_or_16_fp_6 = iris.Constraint(model_level_number=[10, 16], forecast_period=6)
-    cubes = iris.load(filename, level_10_or_16_fp_6)
-
-A common requirement is to limit the value of a coordinate to a specific range,
-this can be achieved by passing the constraint a function::
-
-    def bottom_16_levels(cell):
-        # return True or False as to whether the cell in question should be kept
-        return cell <= 16
-
-    filename = iris.sample_data_path('uk_hires.pp')
-    level_lt_16 = iris.Constraint(model_level_number=bottom_16_levels)
-    cubes = iris.load(filename, level_lt_16)
-
-.. note::
-
-    As with many of the examples later in this documentation, the
-    simple function above can be conveniently written as a lambda function
-    on a single line::
-
-        bottom_16_levels = lambda cell: cell <= 16
-
-
-Note also the :ref:`warning on equality constraints with floating point coordinates <floating-point-warning>`.
-
-
-Cube attributes can also be part of the constraint criteria. Supposing a
-cube attribute of ``STASH`` existed, as is the case when loading ``PP`` files,
-then specific STASH codes can be filtered::
-
-    filename = iris.sample_data_path('uk_hires.pp')
-    level_10_with_stash = iris.AttributeConstraint(STASH='m01s00i004') & iris.Constraint(model_level_number=10)
-    cubes = iris.load(filename, level_10_with_stash)
-
-.. seealso::
-
-    For advanced usage there are further examples in the
-    :class:`iris.Constraint` reference documentation.
-
-
-Constraining a Circular Coordinate Across its Boundary
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Occasionally you may need to constrain your cube with a region that crosses the
-boundary of a circular coordinate (this is often the meridian or the dateline /
-antimeridian). An example use-case of this is to extract the entire Pacific Ocean
-from a cube whose longitudes are bounded by the dateline.
-
-This functionality cannot be provided reliably using constraints. Instead you should use the
-functionality provided by :meth:`cube.intersection <iris.cube.Cube.intersection>`
-to extract this region.
-
-
-.. _using-time-constraints:
-
-Constraining on Time
-^^^^^^^^^^^^^^^^^^^^
-Iris follows NetCDF-CF rules in representing time coordinate values as normalised,
-purely numeric, values which are normalised by the calendar specified in the coordinate's
-units (e.g. "days since 1970-01-01").
-However, when constraining by time we usually want to test calendar-related
-aspects such as hours of the day or months of the year, so Iris
-provides special features to facilitate this:
-
-Firstly, when Iris evaluates Constraint expressions, it will convert time-coordinate
-values (points and bounds) from numbers into :class:`~datetime.datetime`-like objects
-for ease of calendar-based testing.
-
-    >>> filename = iris.sample_data_path('uk_hires.pp')
-    >>> cube_all = iris.load_cube(filename, 'air_potential_temperature')
-    >>> print('All times :\n' + str(cube_all.coord('time')))
-    All times :
-    DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar)
-        points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00]
-        shape: (3,)
-        dtype: float64
-        standard_name: 'time'
-    >>> # Define a function which accepts a datetime as its argument (this is simplified in later examples).
- >>> hour_11 = iris.Constraint(time=lambda cell: cell.point.hour == 11) - >>> cube_11 = cube_all.extract(hour_11) - >>> print('Selected times :\n' + str(cube_11.coord('time'))) - Selected times : - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) - points: [2009-11-19 11:00:00] - shape: (1,) - dtype: float64 - standard_name: 'time' - -Secondly, the :class:`iris.time` module provides flexible time comparison -facilities. An :class:`iris.time.PartialDateTime` object can be compared to -objects such as :class:`datetime.datetime` instances, and this comparison will -then test only those 'aspects' which the PartialDateTime instance defines: - - >>> import datetime - >>> from iris.time import PartialDateTime - >>> dt = datetime.datetime(2011, 3, 7) - >>> print(dt > PartialDateTime(year=2010, month=6)) - True - >>> print(dt > PartialDateTime(month=6)) - False - >>> - -These two facilities can be combined to provide straightforward calendar-based -time selections when loading or extracting data. - -The previous constraint example can now be written as: - - >>> the_11th_hour = iris.Constraint(time=iris.time.PartialDateTime(hour=11)) - >>> print(iris.load_cube( - ... iris.sample_data_path('uk_hires.pp'), - ... 'air_potential_temperature' & the_11th_hour).coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) - points: [2009-11-19 11:00:00] - shape: (1,) - dtype: float64 - standard_name: 'time' - -It is common that a cube will need to be constrained between two given dates. -In the following example we construct a time sequence representing the first -day of every week for many years: - -.. testsetup:: timeseries_range - - import datetime - import numpy as np - from iris.time import PartialDateTime - long_ts = iris.cube.Cube(np.arange(150), long_name='data', units='1') - _mondays = iris.coords.DimCoord(7 * np.arange(150), standard_name='time', units='days since 2007-04-09') - long_ts.add_dim_coord(_mondays, 0) - - -.. doctest:: timeseries_range - :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - - >>> print(long_ts.coord('time')) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-04-09 00:00:00, 2007-04-16 00:00:00, ..., - 2010-02-08 00:00:00, 2010-02-15 00:00:00] - shape: (150,) - dtype: int64 - standard_name: 'time' - -Given two dates in datetime format, we can select all points between them. - -.. doctest:: timeseries_range - :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - - >>> d1 = datetime.datetime.strptime('20070715T0000Z', '%Y%m%dT%H%MZ') - >>> d2 = datetime.datetime.strptime('20070825T0000Z', '%Y%m%dT%H%MZ') - >>> st_swithuns_daterange_07 = iris.Constraint( - ... time=lambda cell: d1 <= cell.point < d2) - >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) - >>> print(within_st_swithuns_07.coord('time')) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] - shape: (6,) - dtype: int64 - standard_name: 'time' - -Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime` -objects. - -.. doctest:: timeseries_range - :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - - >>> pdt1 = PartialDateTime(year=2007, month=7, day=15) - >>> pdt2 = PartialDateTime(year=2007, month=8, day=25) - >>> st_swithuns_daterange_07 = iris.Constraint( - ... 
time=lambda cell: pdt1 <= cell.point < pdt2) - >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) - >>> print(within_st_swithuns_07.coord('time')) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] - shape: (6,) - dtype: int64 - standard_name: 'time' - -A more complex example might require selecting points over an annually repeating -date range. We can select points within a certain part of the year, in this case -between the 15th of July through to the 25th of August. By making use of -PartialDateTime this becomes simple: - -.. doctest:: timeseries_range - - >>> st_swithuns_daterange = iris.Constraint( - ... time=lambda cell: PartialDateTime(month=7, day=15) <= cell < PartialDateTime(month=8, day=25)) - >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange) - ... - >>> # Note: using summary(max_values) to show more of the points - >>> print(within_st_swithuns.coord('time').summary(max_values=100)) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, - 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, - 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, - 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00, - 2009-08-17 00:00:00, 2009-08-24 00:00:00] - shape: (17,) - dtype: int64 - standard_name: 'time' - -Notice how the dates printed are between the range specified in the ``st_swithuns_daterange`` -and that they span multiple years. +Further details on using :class:`iris.Constraint` are +discussed later in :ref:`cube_extraction`. .. _strict-loading: diff --git a/docs/src/userguide/merge_and_concat.rst b/docs/src/userguide/merge_and_concat.rst index e8425df5ec..08c3ce9711 100644 --- a/docs/src/userguide/merge_and_concat.rst +++ b/docs/src/userguide/merge_and_concat.rst @@ -22,14 +22,14 @@ result in fewer cubes as output. The following diagram illustrates the two proce There is one major difference between the ``merge`` and ``concatenate`` processes. - * The ``merge`` process combines multiple input cubes into a - single resultant cube with new dimensions created from the - *scalar coordinate values* of the input cubes. - - * The ``concatenate`` process combines multiple input cubes into a - single resultant cube with the same *number of dimensions* as the input cubes, - but with the length of one or more dimensions extended by *joining together - sequential dimension coordinates*. +* The ``merge`` process combines multiple input cubes into a + single resultant cube with new dimensions created from the + *scalar coordinate values* of the input cubes. + +* The ``concatenate`` process combines multiple input cubes into a + single resultant cube with the same *number of dimensions* as the input cubes, + but with the length of one or more dimensions extended by *joining together + sequential dimension coordinates*. Let's imagine 28 individual cubes representing the temperature at a location ``(y, x)``; one cube for each day of February. We can use diff --git a/docs/src/userguide/plotting_a_cube.rst b/docs/src/userguide/plotting_a_cube.rst index cfb3445d9b..a2334367c5 100644 --- a/docs/src/userguide/plotting_a_cube.rst +++ b/docs/src/userguide/plotting_a_cube.rst @@ -101,15 +101,15 @@ see :py:func:`matplotlib.pyplot.savefig`). 
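+For instance, the following minimal sketch (assuming the standard Iris sample
+data is installed) draws a cube with ``iris.quickplot`` and saves the result
+to a PNG file::
+
+    import iris
+    import iris.quickplot as qplt
+    import matplotlib.pyplot as plt
+
+    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
+    qplt.contourf(cube)          # draw the cube on the current figure
+    plt.savefig('air_temp.png')  # the file extension selects the format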
Some of the formats which are supported by **plt.savefig**:
 
-    ====== ====== ======================================================================
-    Format Type   Description
-    ====== ====== ======================================================================
-    EPS    Vector Encapsulated PostScript
-    PDF    Vector Portable Document Format
-    PNG    Raster Portable Network Graphics, a format with a lossless compression method
-    PS     Vector PostScript, ideal for printer output
-    SVG    Vector Scalable Vector Graphics, XML based
-    ====== ====== ======================================================================
+====== ====== ======================================================================
+Format Type   Description
+====== ====== ======================================================================
+EPS    Vector Encapsulated PostScript
+PDF    Vector Portable Document Format
+PNG    Raster Portable Network Graphics, a format with a lossless compression method
+PS     Vector PostScript, ideal for printer output
+SVG    Vector Scalable Vector Graphics, XML based
+====== ====== ======================================================================
 
 ******************
 Iris Cube Plotting
 ******************
@@ -125,12 +125,12 @@ wrapper functions.
 
 As a rule of thumb:
 
-    * if you wish to do a visualisation with a cube, use ``iris.plot`` or
-      ``iris.quickplot``.
-    * if you wish to show, save or manipulate **any** visualisation,
-      including ones created with Iris, use ``matplotlib.pyplot``.
-    * if you wish to create a non cube visualisation, also use
-      ``matplotlib.pyplot``.
+* if you wish to do a visualisation with a cube, use ``iris.plot`` or
+  ``iris.quickplot``.
+* if you wish to show, save or manipulate **any** visualisation,
+  including ones created with Iris, use ``matplotlib.pyplot``.
+* if you wish to create a non-cube visualisation, also use
+  ``matplotlib.pyplot``.
 
 The ``iris.quickplot`` module is exactly the same as the ``iris.plot`` module,
 except that ``quickplot`` will add a title, x and y labels and a colorbar
diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst
index 0bc1846457..9d66a2f086 100644
--- a/docs/src/userguide/real_and_lazy_data.rst
+++ b/docs/src/userguide/real_and_lazy_data.rst
@@ -140,11 +140,11 @@ Core Data
 Cubes have the concept of "core data". This returns the cube's data in its
 current state:
 
-  * If a cube has lazy data, calling the cube's :meth:`~iris.cube.Cube.core_data` method
-    will return the cube's lazy dask array. Calling the cube's
-    :meth:`~iris.cube.Cube.core_data` method **will never realise** the cube's data.
-  * If a cube has real data, calling the cube's :meth:`~iris.cube.Cube.core_data` method
-    will return the cube's real NumPy array.
+* If a cube has lazy data, calling the cube's :meth:`~iris.cube.Cube.core_data` method
+  will return the cube's lazy dask array. Calling the cube's
+  :meth:`~iris.cube.Cube.core_data` method **will never realise** the cube's data.
+* If a cube has real data, calling the cube's :meth:`~iris.cube.Cube.core_data` method
+  will return the cube's real NumPy array.
 
 For example::
 
@@ -174,14 +174,14 @@ In the same way that Iris cubes contain a data array, Iris coordinates contain a
 points array and an optional bounds array.
 Coordinate points and bounds arrays can also be real or lazy:
 
-  * A :class:`~iris.coords.DimCoord` will only ever have **real** points and bounds
-    arrays because of monotonicity checks that realise lazy arrays.
-  * An :class:`~iris.coords.AuxCoord` can have **real or lazy** points and bounds.
-  * An :class:`~iris.aux_factory.AuxCoordFactory` (or derived coordinate)
-    can have **real or lazy** points and bounds. If all of the
-    :class:`~iris.coords.AuxCoord` instances used to construct the derived coordinate
-    have real points and bounds then the derived coordinate will have real points
-    and bounds, otherwise the derived coordinate will have lazy points and bounds.
+* A :class:`~iris.coords.DimCoord` will only ever have **real** points and bounds
+  arrays because of monotonicity checks that realise lazy arrays.
+* An :class:`~iris.coords.AuxCoord` can have **real or lazy** points and bounds.
+* An :class:`~iris.aux_factory.AuxCoordFactory` (or derived coordinate)
+  can have **real or lazy** points and bounds. If all of the
+  :class:`~iris.coords.AuxCoord` instances used to construct the derived coordinate
+  have real points and bounds then the derived coordinate will have real points
+  and bounds, otherwise the derived coordinate will have lazy points and bounds.
 
 Iris cubes and coordinates have very similar interfaces, which extends to accessing
 coordinates' lazy points and bounds:
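+
+For example, a minimal sketch (assuming ``cube`` carries an auxiliary
+coordinate named ``surface_altitude`` whose points have been loaded lazily
+from a file)::
+
+    coord = cube.coord('surface_altitude')
+    print(coord.has_lazy_points())     # True while the points are still lazy
+    points = coord.core_points()       # lazy or real, without realising
+    lazy_points = coord.lazy_points()  # always a lazy (dask) array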
diff --git a/docs/src/userguide/subsetting_a_cube.rst b/docs/src/userguide/subsetting_a_cube.rst
index 5112d9689a..c4f55490af 100644
--- a/docs/src/userguide/subsetting_a_cube.rst
+++ b/docs/src/userguide/subsetting_a_cube.rst
@@ -10,9 +10,10 @@ However it is often necessary to reduce the dimensionality of a cube down to some
 Iris provides several ways of reducing both the amount of data and/or the number of
 dimensions in your cube depending on the circumstance. In all cases **the subset of a
 valid cube is itself a valid cube**.
 
+.. _cube_extraction:
 
 Cube Extraction
-^^^^^^^^^^^^^^^^
+---------------
 A subset of a cube can be "extracted" from a multi-dimensional cube in order to reduce its dimensionality:
 
     >>> import iris
@@ -34,15 +35,14 @@ A subset of a cube can be "extracted" from a multi-dimensional cube in order to
 
 In this example we start with a 3 dimensional cube, with dimensions of ``height``, ``grid_latitude`` and ``grid_longitude``,
-and extract every point where the latitude is 0, resulting in a 2d cube with axes of ``height`` and ``grid_longitude``.
-
+and use :class:`iris.Constraint` to extract every point where the latitude is 0, resulting in a 2d cube with axes of ``height`` and ``grid_longitude``.
 
 .. _floating-point-warning:
 .. warning::
 
     Caution is required when using equality constraints with floating point coordinates such as ``grid_latitude``.
     Printing the points of a coordinate does not necessarily show the full precision of the underlying number and it
-    is very easy return no matches to a constraint when one was expected.
+    is very easy to return no matches to a constraint when one was expected.
     This can be avoided by using a function as the argument to the constraint::
 
        def near_zero(cell):
@@ -68,6 +68,33 @@ The two steps required to get ``height`` of 9000 m at the equator can be simplified
 
     equator_height_9km_slice = cube.extract(iris.Constraint(grid_latitude=0, height=9000))
     print(equator_height_9km_slice)
 
+Alternatively, constraints can be combined using ``&``::
+
+    cube = iris.load_cube(filename, 'electron density')
+    equator_constraint = iris.Constraint(grid_latitude=0)
+    height_constraint = iris.Constraint(height=9000)
+    equator_height_9km_slice = cube.extract(equator_constraint & height_constraint)
+
+.. note::
+
+    Whilst ``&`` is supported, the ``|`` that might reasonably be expected is
+    not. Explanation as to why is in the :class:`iris.Constraint` reference
+    documentation.
+
+    For an example of constraining to multiple ranges of the same coordinate to
+    generate one cube, see the :class:`iris.Constraint` reference documentation.
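+
+A single :class:`iris.Constraint` can also accept multiple keyword arguments,
+and a list of values can be given to constrain a coordinate to any one of a
+collection of values. As a minimal sketch (borrowing the ``uk_hires.pp``
+sample data used elsewhere in this guide)::
+
+    filename = iris.sample_data_path('uk_hires.pp')
+    level_10_or_16_fp_6 = iris.Constraint(model_level_number=[10, 16],
+                                          forecast_period=6)
+    cubes = iris.load(filename, level_10_or_16_fp_6)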
+
+A common requirement is to limit the value of a coordinate to a specific range;
+this can be achieved by passing the constraint a function::
+
+    def below_9km(cell):
+        # return True or False as to whether the cell in question should be kept
+        return cell <= 9000
+
+    cube = iris.load_cube(filename, 'electron density')
+    height_below_9km = iris.Constraint(height=below_9km)
+    below_9km_slice = cube.extract(height_below_9km)
+
 As we saw in :doc:`loading_iris_cubes` the result of :func:`iris.load` is a
 :class:`CubeList <iris.cube.CubeList>`.
 The ``extract`` method also exists on a :class:`CubeList <iris.cube.CubeList>` and behaves in exactly the
 same way as loading with constraints:
@@ -100,9 +127,203 @@ same way as loading with constraints:
             source              'Data from Met Office Unified Model'
             um_version          '7.3'
 
+Cube attributes can also be part of the constraint criteria. Supposing a
+cube attribute of ``STASH`` existed, as is the case when loading ``PP`` files,
+then specific STASH codes can be filtered::
+
+    filename = iris.sample_data_path('uk_hires.pp')
+    level_10_with_stash = iris.AttributeConstraint(STASH='m01s00i004') & iris.Constraint(model_level_number=10)
+    cubes = iris.load(filename).extract(level_10_with_stash)
+
+.. seealso::
+
+    For advanced usage there are further examples in the
+    :class:`iris.Constraint` reference documentation.
+
+Constraining a Circular Coordinate Across its Boundary
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Occasionally you may need to constrain your cube with a region that crosses the
+boundary of a circular coordinate (this is often the meridian or the dateline /
+antimeridian). An example use-case of this is to extract the entire Pacific Ocean
+from a cube whose longitudes are bounded by the dateline.
+
+This functionality cannot be provided reliably using constraints. Instead you should use the
+functionality provided by :meth:`cube.intersection <iris.cube.Cube.intersection>`
+to extract this region.
+
+
+.. _using-time-constraints:
+
+Constraining on Time
+^^^^^^^^^^^^^^^^^^^^
+Iris follows NetCDF-CF rules in representing time coordinate values as purely
+numeric values, normalised by the calendar specified in the coordinate's
+units (e.g. "days since 1970-01-01").
+However, when constraining by time we usually want to test calendar-related
+aspects such as hours of the day or months of the year, so Iris
+provides special features to facilitate this.
+
+Firstly, when Iris evaluates :class:`iris.Constraint` expressions, it will convert
+time-coordinate values (points and bounds) from numbers into :class:`~datetime.datetime`-like
+objects for ease of calendar-based testing.
+
+    >>> filename = iris.sample_data_path('uk_hires.pp')
+    >>> cube_all = iris.load_cube(filename, 'air_potential_temperature')
+    >>> print('All times :\n' + str(cube_all.coord('time')))
+    All times :
+    DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar)
+        points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00]
+        shape: (3,)
+        dtype: float64
+        standard_name: 'time'
+    >>> # Define a function which accepts a datetime as its argument (this is simplified in later examples).
+    >>> hour_11 = iris.Constraint(time=lambda cell: cell.point.hour == 11)
+    >>> cube_11 = cube_all.extract(hour_11)
+    >>> print('Selected times :\n' + str(cube_11.coord('time')))
+    Selected times :
+    DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar)
+        points: [2009-11-19 11:00:00]
+        shape: (1,)
+        dtype: float64
+        standard_name: 'time'
+
+Secondly, the :class:`iris.time` module provides flexible time comparison
+facilities. An :class:`iris.time.PartialDateTime` object can be compared to
+objects such as :class:`datetime.datetime` instances, and this comparison will
+then test only those 'aspects' which the PartialDateTime instance defines:
+
+    >>> import datetime
+    >>> from iris.time import PartialDateTime
+    >>> dt = datetime.datetime(2011, 3, 7)
+    >>> print(dt > PartialDateTime(year=2010, month=6))
+    True
+    >>> print(dt > PartialDateTime(month=6))
+    False
+
+These two facilities can be combined to provide straightforward calendar-based
+time selections when loading or extracting data.
+
+The previous constraint example can now be written as:
+
+    >>> the_11th_hour = iris.Constraint(time=iris.time.PartialDateTime(hour=11))
+    >>> print(iris.load_cube(
+    ...     iris.sample_data_path('uk_hires.pp'),
+    ...     'air_potential_temperature' & the_11th_hour).coord('time'))
+    DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar)
+        points: [2009-11-19 11:00:00]
+        shape: (1,)
+        dtype: float64
+        standard_name: 'time'
+
+It is common that a cube will need to be constrained between two given dates.
+In the following example we construct a time sequence representing the first
+day of every week for many years:
+
+.. testsetup:: timeseries_range
+
+    import datetime
+    import numpy as np
+    from iris.time import PartialDateTime
+    long_ts = iris.cube.Cube(np.arange(150), long_name='data', units='1')
+    _mondays = iris.coords.DimCoord(7 * np.arange(150), standard_name='time', units='days since 2007-04-09')
+    long_ts.add_dim_coord(_mondays, 0)
+
+
+.. doctest:: timeseries_range
+    :options: +NORMALIZE_WHITESPACE, +ELLIPSIS
+
+    >>> print(long_ts.coord('time'))
+    DimCoord : time / (days since 2007-04-09, standard calendar)
+        points: [
+            2007-04-09 00:00:00, 2007-04-16 00:00:00, ...,
+            2010-02-08 00:00:00, 2010-02-15 00:00:00]
+        shape: (150,)
+        dtype: int64
+        standard_name: 'time'
+
+Given two dates in datetime format, we can select all points between them.
+Instead of constraining at load time, we already have the time coordinate,
+so we can constrain that coordinate using :meth:`iris.cube.Cube.extract`.
+
+.. doctest:: timeseries_range
+    :options: +NORMALIZE_WHITESPACE, +ELLIPSIS
+
+    >>> d1 = datetime.datetime.strptime('20070715T0000Z', '%Y%m%dT%H%MZ')
+    >>> d2 = datetime.datetime.strptime('20070825T0000Z', '%Y%m%dT%H%MZ')
+    >>> st_swithuns_daterange_07 = iris.Constraint(
+    ...     time=lambda cell: d1 <= cell.point < d2)
+    >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07)
+    >>> print(within_st_swithuns_07.coord('time'))
+    DimCoord : time / (days since 2007-04-09, standard calendar)
+        points: [
+            2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00,
+            2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00]
+        shape: (6,)
+        dtype: int64
+        standard_name: 'time'
+
+Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime`
+objects.
+
+.. doctest:: timeseries_range
+    :options: +NORMALIZE_WHITESPACE, +ELLIPSIS
+
+    >>> pdt1 = PartialDateTime(year=2007, month=7, day=15)
+    >>> pdt2 = PartialDateTime(year=2007, month=8, day=25)
+    >>> st_swithuns_daterange_07 = iris.Constraint(
+    ...     time=lambda cell: pdt1 <= cell.point < pdt2)
+    >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07)
+    >>> print(within_st_swithuns_07.coord('time'))
+    DimCoord : time / (days since 2007-04-09, standard calendar)
+        points: [
+            2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00,
+            2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00]
+        shape: (6,)
+        dtype: int64
+        standard_name: 'time'
+
+A more complex example might require selecting points over an annually repeating
+date range. We can select points within a certain part of the year, in this case
+from the 15th of July through to the 25th of August. By making use of
+PartialDateTime this becomes simple:
+
+.. doctest:: timeseries_range
+
+    >>> st_swithuns_daterange = iris.Constraint(
+    ...     time=lambda cell: PartialDateTime(month=7, day=15) <= cell.point < PartialDateTime(month=8, day=25))
+    >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange)
+    ...
+    >>> # Note: using summary(max_values) to show more of the points
+    >>> print(within_st_swithuns.coord('time').summary(max_values=100))
+    DimCoord : time / (days since 2007-04-09, standard calendar)
+        points: [
+            2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00,
+            2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00,
+            2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00,
+            2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00,
+            2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00,
+            2009-08-17 00:00:00, 2009-08-24 00:00:00]
+        shape: (17,)
+        dtype: int64
+        standard_name: 'time'
+
+Notice how the dates printed fall within the range specified by ``st_swithuns_daterange``
+and that they span multiple years.
+
+The above examples involve constraining on the points of the time coordinate. Constraining
+on bounds can be done in the following way::
+
+    filename = iris.sample_data_path('ostia_monthly.nc')
+    cube = iris.load_cube(filename, 'surface_temperature')
+    dtmin = datetime.datetime(2008, 1, 1)
+    cube.extract(iris.Constraint(time=lambda cell: any(bound > dtmin for bound in cell.bound)))
+
+The above example constrains to cells where either the upper or lower bound occurs
+after 1st January 2008.
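+
+Note that ``cell.bound`` is ``None`` for a coordinate without bounds, so a more
+defensive version of the same constraint might be written as follows (a sketch
+along the same lines, not part of the original example)::
+
+    def in_2008_or_later(cell):
+        # Keep only cells which have bounds and whose upper or lower
+        # bound falls after the reference date.
+        return cell.bound is not None and any(bound > dtmin for bound in cell.bound)
+
+    cube.extract(iris.Constraint(time=in_2008_or_later))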
 
 Cube Iteration
-^^^^^^^^^^^^^^^
+--------------
 It is not possible to directly iterate over an Iris cube. That is, you cannot use code such as
 ``for x in cube:``. However, you can iterate over cube slices, as this section details.
 
@@ -151,9 +372,10 @@ slicing the 3 dimensional cube (15, 100, 100) by longitude (i starts at 0 and 15
 Once your code can handle a 2d slice, it is then an easy step to loop over **all** 2d slices within the bigger
 cube using the slices method.
 
+.. _cube_indexing:
 
 Cube Indexing
-^^^^^^^^^^^^^
+-------------
 In the same way that you would expect a numeric multidimensional array to be **indexed** to take a subset of your
 original array, you can **index** a Cube for the same purpose.
 
diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst
new file mode 100644
index 0000000000..7d983448b9
--- /dev/null
+++ b/docs/src/voted_issues.rst
@@ -0,0 +1,56 @@
+.. include:: common_links.inc
+
+.. _voted_issues_top:
+
+Voted Issues
+============
+
+You can help us to prioritise development of new features by leaving a 👍
+reaction on the header (not subsequent comments) of any issue.
+
+.. tip:: We suggest you subscribe to the issue so you will be updated.
+         When viewing the issue there is a **Notifications**
+         section where you can select to subscribe.
+
+Below is a table of all issues that have 1 or more 👍, sorted by vote count,
+from our GitHub project. Please note that there is more development activity
+than is shown in the table below.
+
+.. _voted-issues.json: https://github.com/scitools/voted_issues/blob/main/voted-issues.json
+
+.. raw:: html
+
+   <!-- Only the table header is recoverable here; the rows are populated
+        dynamically from voted-issues.json. -->
+   <table>
+     <tr><th>👍</th><th>Issue</th><th>Author</th><th>Title</th></tr>
+   </table>
+
+
+.. note:: The data in this table is updated every 30 minutes and is sourced
+          from `voted-issues.json`_.
+          For the latest data please see the `issues on GitHub`_.
+          Note that the list on GitHub does not show the number of votes 👍,
+          only the total number of comments for the whole issue.
\ No newline at end of file
diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst
index 771a602954..223ef60011 100644
--- a/docs/src/whatsnew/3.0.rst
+++ b/docs/src/whatsnew/3.0.rst
@@ -97,9 +97,8 @@ v3.0.2 (27 May 2021)
       from collaborators targeting the Iris ``master`` branch. (:pull:`4007`)
       [``pre-v3.1.0``]
 
-   #. `@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow
-      developers to easily disable `cirrus-ci`_ tasks. See
-      :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`) [``pre-v3.1.0``]
+   #. `@bjlittle`_ added conditional task execution to ``.cirrus.yml`` to allow
+      developers to easily disable `cirrus-ci`_ tasks. (:pull:`4019`) [``pre-v3.1.0``]
 
    #. `@pp-mo`_ adjusted the use of :func:`dask.array.from_array` in
      :func:`iris._lazy_data.as_lazy_data`, to avoid the dask 'test access'.
     This makes loading of netcdf files with a
diff --git a/docs/src/whatsnew/3.1.rst b/docs/src/whatsnew/3.1.rst
index bd046a0a24..1f076572bc 100644
--- a/docs/src/whatsnew/3.1.rst
+++ b/docs/src/whatsnew/3.1.rst
@@ -227,9 +227,8 @@ This document explains the changes made to Iris for this release
 #. `@akuhnregnier`_ replaced `deprecated numpy 1.20 aliases for builtin
    types`_. (:pull:`3997`)
 
-#. `@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow
-   developers to easily disable `cirrus-ci`_ tasks. See
-   :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`)
+#. `@bjlittle`_ added conditional task execution to ``.cirrus.yml`` to allow
+   developers to easily disable `cirrus-ci`_ tasks. (:pull:`4019`)
 
 #. `@bjlittle`_ and `@jamesp`_ addressed a regression in behaviour when using
    `conda`_ 4.10.0 within `cirrus-ci`_. (:pull:`4084`)
@@ -291,9 +290,8 @@ This document explains the changes made to Iris for this release
 #. `@bjlittle`_ enabled `cirrus-ci`_ compute credits for non-draft pull-requests
    from collaborators targeting the Iris ``master`` branch. (:pull:`4007`)
 
-#. `@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow
-   developers to easily disable `cirrus-ci`_ tasks. See
-   :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`)
+#. `@bjlittle`_ added conditional task execution to ``.cirrus.yml`` to allow
+   developers to easily disable `cirrus-ci`_ tasks. (:pull:`4019`)
diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/3.2.rst
similarity index 92%
rename from docs/src/whatsnew/dev.rst
rename to docs/src/whatsnew/3.2.rst
index e2d4c2bc0b..723f26345e 100644
--- a/docs/src/whatsnew/dev.rst
+++ b/docs/src/whatsnew/3.2.rst
@@ -1,13 +1,13 @@
 .. include:: ../common_links.inc
 
-|iris_version| |build_date| [unreleased]
-****************************************
+v3.2 (15 Feb 2022)
+******************
 
 This document explains the changes made to Iris for this release
 (:doc:`View all changes <index>`.)
 
-.. dropdown:: :opticon:`report` |iris_version| Release Highlights
+.. dropdown:: :opticon:`report` v3.2.0 Release Highlights
    :container: + shadow
    :title: text-primary text-center font-weight-bold
    :body: bg-light
@@ -18,14 +18,37 @@ This document explains the changes made to Iris for this release
 
    * We've added experimental support for :ref:`Meshes <ugrid>`, which
     can now be loaded and
-     attached to a cube. Mesh support is based on the based on `CF-UGRID`_
-     model.
+     attached to a cube. 
Mesh support is based on the `CF-UGRID`_ model. * We've also dropped support for ``Python 3.7``. And finally, get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! +v3.2.1 (11 Mar 2022) +==================== + +.. dropdown:: :opticon:`alert` v3.2.1 Patches + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + + 📢 **Welcome** to `@dennissergeev`_, who made his first contribution to Iris. Nice work! + + The patches in this release of Iris include: + + 🐛 **Bugs Fixed** + + #. `@dennissergeev`_ changed _crs_distance_differentials() so that it uses the `Globe` + attribute from a given CRS instead of creating a new `ccrs.Globe()` object. + Iris can now handle non-Earth semi-major axes, as discussed in :issue:`4582` (:pull:`4605`). + + #. `@trexfeathers`_ avoided a dimensionality mismatch when streaming the + :attr:`~iris.coords.Coord.bounds` array for a scalar + :class:`~iris.coords.Coord`. (:pull:`4610`). + + 📢 Announcements ================ @@ -103,7 +126,7 @@ This document explains the changes made to Iris for this release of Iris (:issue:`4523`). #. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations - from `Metarelate`_. From now we intend to manage phenonemon translation + from ``Metarelate``. From now we intend to manage phenonemon translation in Iris itself. (:pull:`4484`) #. `@pp-mo`_ improved printout of various cube data component objects : @@ -175,9 +198,12 @@ This document explains the changes made to Iris for this release from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) #. `@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` - to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 + to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 (:issue:`3305`, :pull:`4535`) +#. `@lbdreyer`_ fixed a bug in :class:`iris.io.load_http` which was missing an import + (:pull:`4580`) + 💣 Incompatible Changes ======================= @@ -263,7 +289,7 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery example. (:pull:`4120`) -#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in +#. `@trexfeathers`_ included Iris `GitHub Discussions`_ in :ref:`get involved `. (:pull:`4307`) #. `@wjbenfold`_ improved readability in :ref:`userguide interpolation @@ -349,7 +375,7 @@ This document explains the changes made to Iris for this release #. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) -#. `@aaronspring `_ exchanged ``dask`` with +#. `@aaronspring`_ exchanged ``dask`` with ``dask-core`` in testing environments reducing the number of dependencies installed for testing. (:pull:`4434`) @@ -366,6 +392,7 @@ This document explains the changes made to Iris for this release .. _@aaronspring: https://github.com/aaronspring .. _@akuhnregnier: https://github.com/akuhnregnier .. _@bsherratt: https://github.com/bsherratt +.. _@dennissergeev: https://github.com/dennissergeev .. _@larsbarring: https://github.com/larsbarring .. _@pdearnshaw: https://github.com/pdearnshaw .. _@SimonPeatman: https://github.com/SimonPeatman @@ -375,7 +402,6 @@ This document explains the changes made to Iris for this release Whatsnew resources in alphabetical order: .. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _Metarelate: http://www.metarelate.net/ .. 
_UGRID: http://ugrid-conventions.github.io/ugrid-conventions/
.. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid
.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba
diff --git a/docs/src/whatsnew/3.3.rst b/docs/src/whatsnew/3.3.rst
new file mode 100644
index 0000000000..5812b79860
--- /dev/null
+++ b/docs/src/whatsnew/3.3.rst
@@ -0,0 +1,341 @@
+.. include:: ../common_links.inc
+
+v3.3 (1 Sep 2022)
+*****************
+
+This document explains the changes made to Iris for this release
+(:doc:`View all changes <index>`.)
+
+
+.. dropdown:: :opticon:`report` v3.3.0 Release Highlights
+   :container: + shadow
+   :title: text-primary text-center font-weight-bold
+   :body: bg-light
+   :animate: fade-in
+   :open:
+
+   The highlights for this minor release of Iris include:
+
+   * We've added support for datums, loading them from NetCDF when the
+     :obj:`iris.FUTURE.datum_support` flag is set.
+   * We've greatly improved the speed of linear interpolation.
+   * We've added the function :func:`iris.pandas.as_cubes` for richer
+     conversion from Pandas.
+   * We've improved the functionality of :func:`iris.util.mask_cube`.
+   * We've improved the functionality and performance of the
+     :obj:`iris.analysis.PERCENTILE` aggregator.
+   * We've completed implementation of our :ref:`contributing.benchmarks`
+     infrastructure.
+
+   And finally, get in touch with us on :issue:`GitHub` if you have
+   any issues or feature requests for improving Iris. Enjoy!
+
+
+📢 Announcements
+================
+
+#. Welcome to `@krikru`_ who made their first contribution to Iris 🎉
+
+
+✨ Features
+===========
+
+#. `@schlunma`_ added weighted aggregation over "group coordinates":
+   :meth:`~iris.cube.Cube.aggregated_by` now accepts the keyword `weights` if a
+   :class:`~iris.analysis.WeightedAggregator` is used. (:issue:`4581`,
+   :pull:`4589`)
+
+#. `@wjbenfold`_ added support for ``false_easting`` and ``false_northing`` to
+   :class:`~iris.coord_systems.Mercator`. (:issue:`3107`, :pull:`4524`)
+
+#. `@rcomer`_ and `@wjbenfold`_ (reviewer) implemented lazy aggregation for the
+   :obj:`iris.analysis.PERCENTILE` aggregator. (:pull:`3901`)
+
+#. `@pp-mo`_ fixed cube arithmetic operations for cubes with meshes.
+   (:issue:`4454`, :pull:`4651`)
+
+#. `@wjbenfold`_ added support for CF-compliant treatment of
+   ``standard_parallel`` and ``scale_factor_at_projection_origin`` to
+   :class:`~iris.coord_systems.Mercator`. (:issue:`3844`, :pull:`4609`)
+
+#. `@wjbenfold`_ added support for datums associated with coordinate systems
+   (e.g. :class:`~iris.coord_systems.GeogCS` and other subclasses of
+   :class:`~iris.coord_systems.CoordSystem`). Loading of datum information from
+   a netCDF file only happens when the :obj:`iris.FUTURE.datum_support` flag is
+   set. (:issue:`4619`, :pull:`4704`)
+
+#. `@wjbenfold`_ and `@stephenworsley`_ (reviewer) added a maximum run length
+   aggregator (:class:`~iris.analysis.MAX_RUN`). (:pull:`4676`)
+
+#. `@wjbenfold`_ and `@rcomer`_ (reviewer) added a ``climatological`` keyword to
+   :meth:`~iris.cube.Cube.aggregated_by` that causes the climatological flag to
+   be set and the point for each cell to equal its first bound, thereby
+   preserving the time of year. (:issue:`1422`, :issue:`4098`, :issue:`4665`,
+   :pull:`4723`)
+
+#. `@wjbenfold`_ and `@pp-mo`_ (reviewer) implemented the
+   :class:`~iris.coord_systems.PolarStereographic` CRS. (:issue:`4770`,
+   :pull:`4773`)
+
+#. 
`@rcomer`_ and `@wjbenfold`_ (reviewer) enabled passing of the + :func:`numpy.percentile` keywords through the :obj:`~iris.analysis.PERCENTILE` + aggregator. (:pull:`4791`) + +#. `@wjbenfold`_ and `@bjlittle`_ (reviewer) implemented + :func:`iris.plot.fill_between` and :func:`iris.quickplot.fill_between`. + (:issue:`3493`, :pull:`4647`) + +#. `@rcomer`_ and `@bjlittle`_ (reviewer) re-wrote :func:`iris.util.mask_cube` + to provide lazy evaluation and greater flexibility with respect to input types. + (:issue:`3936`, :pull:`4889`) + +#. `@stephenworsley`_ and `@lbdreyer`_ added a new kwarg ``expand_extras`` to + :func:`iris.util.new_axis` which can be used to specify instances of + :class:`~iris.coords.AuxCoord`, :class:`~iris.coords.CellMeasure` and + :class:`~iris.coords.AncillaryVariable` which should also be expanded to map + to the new axis. (:pull:`4896`) + +#. `@stephenworsley`_ updated to the latest CF Standard Names Table ``v79`` + (19 March 2022). (:pull:`4910`) + +#. `@trexfeathers`_ and `@lbdreyer`_ (reviewer) added + :func:`iris.pandas.as_cubes`, which provides richer conversion from + Pandas :class:`~pandas.Series` / :class:`~pandas.DataFrame`\s to one or more + :class:`~iris.cube.Cube`\s. This includes: n-dimensional datasets, + :class:`~iris.coords.AuxCoord`\s, :class:`~iris.coords.CellMeasure`\s, + :class:`~iris.coords.AncillaryVariable`\s, and multi-dimensional + coordinates. (:pull:`4890`) + + +🐛 Bugs Fixed +============= + +#. `@rcomer`_ reverted part of the change from :pull:`3906` so that + :func:`iris.plot.plot` no longer defaults to placing a "Y" coordinate (e.g. + latitude) on the y-axis of the plot. (:issue:`4493`, :pull:`4601`) + +#. `@rcomer`_ enabled passing of scalar objects to :func:`~iris.plot.plot` and + :func:`~iris.plot.scatter`. (:pull:`4616`) + +#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.aggregated_by` with `mdtol` for 1D + cubes where an aggregated section is entirely masked, reported at + :issue:`3190`. (:pull:`4246`) + +#. `@rcomer`_ ensured that a :class:`matplotlib.axes.Axes`'s position is preserved + when Iris replaces it with a :class:`cartopy.mpl.geoaxes.GeoAxes`, fixing + :issue:`1157`. (:pull:`4273`) + +#. `@rcomer`_ fixed :meth:`~iris.coords.Coord.nearest_neighbour_index` for edge + cases where the requested point is float and the coordinate has integer + bounds, reported at :issue:`2969`. (:pull:`4245`) + +#. `@rcomer`_ modified bounds setting on :obj:`~iris.coords.DimCoord` instances + so that the order of the cell bounds is automatically reversed + to match the coordinate's direction if necessary. This is consistent with + the `Bounds for 1-D coordinate variables` subsection of the `Cell Boundaries`_ + section of the CF Conventions and ensures that contiguity is preserved if a + coordinate's direction is reversed. (:issue:`3249`, :issue:`423`, + :issue:`4078`, :issue:`3756`, :pull:`4466`) + +#. `@wjbenfold`_ and `@evertrol`_ prevented an ``AttributeError`` being logged + to ``stderr`` when a :class:`~iris.fileformats.cf.CFReader` that fails to + initialise is garbage collected. (:issue:`3312`, :pull:`4646`) + +#. `@wjbenfold`_ fixed plotting of circular coordinates to extend kwarg arrays + as well as the data. (:issue:`466`, :pull:`4649`) + +#. `@wjbenfold`_ and `@rcomer`_ (reviewer) corrected the axis on which masking + is applied when an aggregator adds a trailing dimension. (:pull:`4755`) + +#. `@rcomer`_ and `@pp-mo`_ ensured that all methods to create or modify a + :class:`iris.cube.CubeList` check that it only contains cubes. 
According to + code comments, this was supposedly already the case, but there were several bugs + and loopholes. (:issue:`1897`, :pull:`4767`) + +#. `@rcomer`_ modified cube arithmetic to handle mismatches in the cube's data + array type. This prevents masks being lost in some cases and therefore + resolves :issue:`2987`. (:pull:`3790`) + +#. `@krikru`_ and `@rcomer`_ updated :mod:`iris.quickplot` such that the + colorbar is added to the correct ``axes`` when specified as a keyword + argument to a plotting routine. Otherwise, by default the colorbar will be + added to the current axes of the current figure. (:pull:`4894`) + +#. `@rcomer`_ and `@bjlittle`_ (reviewer) modified :func:`iris.util.mask_cube` so it + either works in place or returns a new cube (:issue:`3717`, :pull:`4889`) + + +💣 Incompatible Changes +======================= + +#. `@rcomer`_ and `@bjlittle`_ (reviewer) updated Iris's calendar handling to be + consistent with ``cf-units`` version 3.1. In line with the `Calendar`_ + section in version 1.9 of the CF Conventions, we now use "standard" rather + than the deprecated "gregorian" label for the default calendar. Units may + still be instantiated with ``calendar="gregorian"`` but their calendar + attribute will be silently changed to "standard". This may cause failures in + code that explicitly checks the calendar attribute. (:pull:`4847`) + + +🚀 Performance +============== + +#. `@wjbenfold`_ added caching to the calculation of the points array in a + :class:`~iris.coords.DimCoord` created using + :meth:`~iris.coords.DimCoord.from_regular`. (:pull:`4698`) + +#. `@wjbenfold`_ introduced caching in :func:`_lazy_data._optimum_chunksize` and + :func:`iris.fileformats.pp_load_rules._epoch_date_hours` to reduce time spent + repeating calculations. (:pull:`4716`) + +#. `@pp-mo`_ made :meth:`~iris.cube.Cube.add_aux_factory` faster. + (:pull:`4718`) + +#. `@wjbenfold`_ and `@rcomer`_ (reviewer) permitted the fast percentile + aggregation method to be used on masked data when the missing data tolerance + is set to 0. (:issue:`4735`, :pull:`4755`) + +#. `@wjbenfold`_ improved the speed of linear interpolation using + :meth:`iris.analysis.trajectory.interpolate` (:pull:`4366`) + +#. NumPy ``v1.23`` behaviour changes mean that + :func:`iris.experimental.ugrid.utils.recombine_submeshes` now uses ~3x as + much memory; testing shows a ~16-million point mesh will now use ~600MB. + Investigated by `@pp-mo`_ and `@trexfeathers`_. (:issue:`4845`) + + +🔥 Deprecations +=============== + +#. `@trexfeathers`_ and `@lbdreyer`_ (reviewer) deprecated + :func:`iris.pandas.as_cube` in favour of the new + :func:`iris.pandas.as_cubes` - see `✨ Features`_ for more details. + (:pull:`4890`) + + +🔗 Dependencies +=============== + +#. `@rcomer`_ introduced the ``nc-time-axis >=1.4`` minimum pin, reflecting that + we no longer use the deprecated :class:`nc_time_axis.CalendarDateTime` + when plotting against time coordinates. (:pull:`4584`) + +#. `@wjbenfold`_ and `@bjlittle`_ (reviewer) unpinned ``pillow``. (:pull:`4826`) + +#. `@rcomer`_ introduced the ``cf-units >=3.1`` minimum pin, reflecting the + alignment of calendar behaviour in the two packages (see Incompatible Changes). + (:pull:`4847`) + +#. `@bjlittle`_ introduced the ``sphinx-gallery >=0.11.0`` minimum pin. + (:pull:`4885`) + +#. `@trexfeathers`_ updated the install process to work with setuptools + ``>=v64``, making ``v64`` the minimum compatible version. (:pull:`4903`) + +#. 
`@stephenworsley`_ and `@trexfeathers`_ introduced the ``shapely !=1.8.3`` + pin, avoiding a bug caused by its interaction with cartopy. + (:pull:`4911`, :pull:`4917`) + + +📚 Documentation +================ + +#. `@tkknight`_ added a page to show the issues that have been voted for. See + :ref:`voted_issues_top`. (:issue:`3307`, :pull:`4617`) + +#. `@wjbenfold`_ added a note about fixing proxy URLs in lockfiles generated + because dependencies have changed. (:pull:`4666`) + +#. `@lbdreyer`_ moved most of the User Guide's :class:`iris.Constraint` examples + from :ref:`loading_iris_cubes` to :ref:`cube_extraction` and added an + example of constraining on bounded time. (:pull:`4656`) + +#. `@tkknight`_ adopted the `PyData Sphinx Theme`_ for the documentation. + (:discussion:`4344`, :pull:`4661`) + +#. `@tkknight`_ updated our developers guidance to show our intent to adopt + numpydoc strings and fixed some API documentation rendering. + See :ref:`docstrings`. (:issue:`4657`, :pull:`4689`) + +#. `@trexfeathers`_ and `@lbdreyer`_ added a page with examples of converting + various mesh formats into the Iris Mesh Data Model. (:pull:`4739`) + +#. `@rcomer`_ updated the "Load a Time Series of Data From the NEMO Model" + gallery example. (:pull:`4741`) + +#. `@wjbenfold`_ added developer documentation to highlight some of the + utilities offered by :class:`iris.IrisTest` and how to update CML and other + output files. (:issue:`4544`, :pull:`4600`) + +#. `@trexfeathers`_ and `@abooton`_ modernised the Iris logo to be SVG format. + (:pull:`3935`) + + +💼 Internal +=========== + +#. `@trexfeathers`_ and `@pp-mo`_ finished implementing a mature benchmarking + infrastructure (see :ref:`contributing.benchmarks`), building on 2 hard + years of lessons learned 🎉. (:pull:`4477`, :pull:`4562`, :pull:`4571`, + :pull:`4583`, :pull:`4621`) + +#. `@wjbenfold`_ used the aforementioned benchmarking infrastructure to + introduce deep (large 3rd dimension) loading and realisation benchmarks. + (:pull:`4654`) + +#. `@wjbenfold`_ made :func:`iris.tests.stock.simple_1d` respect the + ``with_bounds`` argument. (:pull:`4658`) + +#. `@lbdreyer`_ replaced `nose`_ with `pytest`_ as Iris' test runner. + (:pull:`4734`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) migrated to GitHub Actions + for Continuous-Integration. (:pull:`4503`) + +#. `@pp-mo`_ made tests run certain linux executables from the Python env, + specifically ncdump and ncgen. These could otherwise fail when run in IDEs + such as PyCharm and Eclipse, which don't automatically include the Python env + bin in the system PATH. + (:pull:`4794`) + +#. `@trexfeathers`_ and `@pp-mo`_ improved generation of stock NetCDF files. + (:pull:`4827`, :pull:`4836`) + +#. `@rcomer`_ removed some now redundant testing functions. (:pull:`4838`, + :pull:`4878`) + +#. `@bjlittle`_ and `@jamesp`_ (reviewer) and `@lbdreyer`_ (reviewer) extended + the GitHub Continuous-Integration to cover testing on ``py38``, ``py39``, + and ``py310``. (:pull:`4840`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) adopted `setuptools-scm`_ for + automated ``iris`` package versioning. (:pull:`4841`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) added building, testing and + publishing of ``iris`` PyPI ``sdist`` and binary ``wheels`` as part of + our GitHub Continuous-Integration. (:pull:`4849`) + +#. `@rcomer`_ and `@wjbenfold`_ (reviewer) used ``pytest`` parametrization to + streamline the gallery test code. (:pull:`4792`) + +#. 
`@trexfeathers`_ improved settings to work better with
+   ``setuptools_scm``. (:pull:`4925`)
+
+
+.. comment
+    Whatsnew author names (@github name) in alphabetical order. Note that,
+    core dev names are automatically included by the common_links.inc:
+
+.. _@evertrol: https://github.com/evertrol
+.. _@krikru: https://github.com/krikru
+
+
+.. comment
+    Whatsnew resources in alphabetical order:
+
+.. _Calendar: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.9/cf-conventions.html#calendar
+.. _Cell Boundaries: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.9/cf-conventions.html#cell-boundaries
+.. _nose: https://nose.readthedocs.io
+.. _PyData Sphinx Theme: https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html
+.. _pytest: https://docs.pytest.org
+.. _setuptools-scm: https://github.com/pypa/setuptools_scm
diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst
index 51f03e8d8f..8cff21f32f 100644
--- a/docs/src/whatsnew/index.rst
+++ b/docs/src/whatsnew/index.rst
@@ -1,16 +1,19 @@
+.. include:: ../common_links.inc
+
 .. _iris_whatsnew:
 
 What's New in Iris
-******************
-
-These "What's new" pages describe the important changes between major
-Iris versions.
+------------------
 
+.. include:: latest.rst
 
 .. toctree::
    :maxdepth: 1
+   :hidden:
 
-   dev.rst
+   latest.rst
+   3.3.rst
+   3.2.rst
    3.1.rst
    3.0.rst
    2.4.rst
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
deleted file mode 120000
index 56aebe92dd..0000000000
--- a/docs/src/whatsnew/latest.rst
+++ /dev/null
@@ -1 +0,0 @@
-dev.rst
\ No newline at end of file
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
new file mode 100644
index 0000000000..a420494157
--- /dev/null
+++ b/docs/src/whatsnew/latest.rst
@@ -0,0 +1,120 @@
+.. include:: ../common_links.inc
+
+|iris_version| |build_date| [unreleased]
+****************************************
+
+This document explains the changes made to Iris for this release
+(:doc:`View all changes <index>`.)
+
+
+.. dropdown:: :opticon:`report` |iris_version| Release Highlights
+   :container: + shadow
+   :title: text-primary text-center font-weight-bold
+   :body: bg-light
+   :animate: fade-in
+   :open:
+
+   The highlights for this major/minor release of Iris include:
+
+   * N/A
+
+   And finally, get in touch with us on :issue:`GitHub` if you have
+   any issues or feature requests for improving Iris. Enjoy!
+
+
+📢 Announcements
+================
+
+#. Welcome to `@ESadek-MO`_ and `@TTV-Intrepid`_ who made their first contributions to Iris 🎉
+
+
+✨ Features
+===========
+
+#. `@ESadek-MO`_ edited :func:`~iris.io.expand_filespecs` to allow expansion of
+   non-existing paths, and added expansion functionality to :func:`~iris.io.save`.
+   (:issue:`4772`, :pull:`4913`)
+
+
+🐛 Bugs Fixed
+=============
+
+#. `@rcomer`_ and `@pp-mo`_ (reviewer) factored masking into the returned
+   sum-of-weights calculation from :obj:`~iris.analysis.SUM`. (:pull:`4905`)
+
+#. `@schlunma`_ fixed a bug which prevented using
+   :meth:`iris.cube.Cube.collapsed` on coordinates whose number of bounds
+   differs from 0 or 2. This enables the use of this method on mesh
+   coordinates. (:issue:`4672`, :pull:`4870`)
+
+#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) fixed the building of the CF
+   Standard Names module ``iris.std_names`` for the ``setup.py`` commands
+   ``develop`` and ``std_names``. (:issue:`4951`, :pull:`4952`)
+
+#. 
`@lbdreyer`_ and `@pp-mo`_ (reviewer) fixed the cube print out such that + scalar ancillary variables are displayed in a dedicated section rather than + being added to the vector ancillary variables section. Further, ancillary + variables and cell measures that map to a cube dimension of length 1 are now + included in the respective vector sections. (:pull:`4945`) + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. `@rcomer`_ and `@pp-mo`_ (reviewer) increased aggregation speed for + :obj:`~iris.analysis.SUM`, :obj:`~iris.analysis.COUNT` and + :obj:`~iris.analysis.PROPORTION` on real data. (:pull:`4905`) + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. `@rcomer`_ introduced the ``dask >=2.26`` minimum pin, so that Iris can benefit + from Dask's support for `NEP13`_ and `NEP18`_. (:pull:`4905`) + + +📚 Documentation +================ + +#. `@ESadek-MO`_, `@TTV-Intrepid`_ and `@trexfeathers`_ added a gallery example for zonal + means plotted parallel to a cartographic plot. (:pull:`4871`) + + +💼 Internal +=========== + +#. `@rcomer`_ removed the obsolete ``setUpClass`` method from Iris testing. + (:pull:`4927`) + +#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) removed support for + ``python setup.py test``, which is a deprecated approach to executing + package tests, see `pypa/setuptools#1684`_. Also performed assorted + ``setup.py`` script hygiene. (:pull:`4948`, :pull:`4949`, :pull:`4950`) + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@TTV-Intrepid: https://github.com/TTV-Intrepid + + + +.. comment + Whatsnew resources in alphabetical order: + +.. _NEP13: https://numpy.org/neps/nep-0013-ufunc-overrides.html +.. _NEP18: https://numpy.org/neps/nep-0018-array-function-protocol.html +.. _pypa/setuptools#1684: https://github.com/pypa/setuptools/issues/1684 \ No newline at end of file diff --git a/docs/src/whatsnew/dev.rst.template b/docs/src/whatsnew/latest.rst.template similarity index 99% rename from docs/src/whatsnew/dev.rst.template rename to docs/src/whatsnew/latest.rst.template index 79c578ca65..661ee47f50 100644 --- a/docs/src/whatsnew/dev.rst.template +++ b/docs/src/whatsnew/latest.rst.template @@ -42,7 +42,7 @@ v3.X.X (DD MMM YYYY) NOTE: section above is a template for bugfix patches ==================================================== (Please remove this section when creating an initial 'latest.rst') - + 📢 Announcements diff --git a/docs/src/why_iris.rst b/docs/src/why_iris.rst new file mode 100644 index 0000000000..63a515f68e --- /dev/null +++ b/docs/src/why_iris.rst @@ -0,0 +1,44 @@ +.. _why_iris: + +Why Iris +======== + +**A powerful, format-agnostic, community-driven Python package for analysing +and visualising Earth science data.** + +Iris implements a data model based on the `CF conventions `_ +giving you a powerful, format-agnostic interface for working with your data. +It excels when working with multi-dimensional Earth Science data, where tabular +representations become unwieldy and inefficient. + +`CF Standard names `_, +`units `_, and coordinate metadata +are built into Iris, giving you a rich and expressive interface for maintaining +an accurate representation of your data. Its treatment of data and +associated metadata as first-class objects includes: + +.. 
rst-class:: squarelist + +* visualisation interface based on `matplotlib `_ and + `cartopy `_, +* unit conversion, +* subsetting and extraction, +* merge and concatenate, +* aggregations and reductions (including min, max, mean and weighted averages), +* interpolation and regridding (including nearest-neighbor, linear and + area-weighted), and +* operator overloads (``+``, ``-``, ``*``, ``/``, etc.). + +A number of file formats are recognised by Iris, including CF-compliant NetCDF, +GRIB, and PP, and it has a plugin architecture to allow other formats to be +added seamlessly. + +Building upon `NumPy `_ and +`dask `_, Iris scales from efficient +single-machine workflows right through to multi-core clusters and HPC. +Interoperability with packages from the wider scientific Python ecosystem comes +from Iris' use of standard NumPy/dask arrays as its underlying data storage. + +Iris is part of SciTools, for more information see https://scitools.org.uk/. +For **Iris 2.4** and earlier documentation please see the +:link-badge:`https://scitools.org.uk/iris/docs/v2.4.0/,"legacy documentation",cls=badge-info text-white`. diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index bd76168192..9c5fcd9cf0 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,7 +1,7 @@ - 78 - 2021-09-21T11:55:06Z + 79 + 2022-03-19T15:25:54Z Centre for Environmental Data Analysis support@ceda.ac.uk @@ -8014,6 +8014,20 @@ The phrase "magnitude_of_X" means magnitude of a vector X. The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19’-butanoyloxyfucoxanthin is C46H64O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/BUTAXXXX/1/. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19'-hexanoyloxyfucoxanthin is C48H68O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/HEXAXXXX/2/. + + kg m-3 @@ -8028,6 +8042,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for aceto-nitrile is CH3CN. The IUPAC name for aceto-nitrile is ethanenitrile. 
diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml
index bd76168192..9c5fcd9cf0 100644
--- a/etc/cf-standard-name-table.xml
+++ b/etc/cf-standard-name-table.xml
@@ -1,7 +1,7 @@
-  <version_number>78</version_number>
-  <last_modified>2021-09-21T11:55:06Z</last_modified>
+  <version_number>79</version_number>
+  <last_modified>2022-03-19T15:25:54Z</last_modified>
   <institution>Centre for Environmental Data Analysis</institution>
   <contact>support@ceda.ac.uk</contact>

[The remaining hunks of this file lost their XML element markup in extraction,
so only a summary of the surviving text is given here. Version 79 of the table
adds new standard-name <entry> blocks, each with canonical units of kg m-3 or
mol m-3, for mass and mole concentrations of pigments and other constituents
in sea water: 19'-butanoyloxyfucoxanthin, 19'-hexanoyloxyfucoxanthin, ATP,
alpha- and beta-carotene, carotene, chlorophyll-b, chlorophyll-c1c2,
chlorophyll-c3, chlorophyll-c, chlorophyllide-a, diadinoxanthin, fucoxanthin,
lutein, peridinin, phaeopigments, prasinoxanthin, violaxanthin and zeaxanthin,
plus dissolved nitrogen, dissolved organic phosphorus and total phosphorus;
most entries cross-reference the equivalent NERC P01 Parameter Usage
Vocabulary terms. The diff also makes whitespace-level edits to the
sea-surface-wave radiation-stress descriptions ("xx", "xy" and "yy" tensor
components, canonical units Pa), and it reorders and extends the trailing
<alias> section, touching names such as biological_taxon_lsid,
surface_snow_density, the atmosphere_moles_of_cfc / halon / hcfc families,
stratiform precipitation and cloud names, downwelling and upwelling
radiative-flux names, carbon mass-content names, sea-salt and
PM1 / PM2.5 / PM10 aerosol names, and parameterized mesoscale-eddy
quantities.]
sea_surface_wave_mean_period - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + sea_surface_wind_wave_to_direction - - integral_wrt_time_of_toa_outgoing_longwave_flux + + sea_surface_swell_wave_to_direction - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + mass_content_of_water_in_soil - - integral_wrt_time_of_surface_net_downward_longwave_flux + + mass_content_of_water_in_soil_layer - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + sea_surface_wave_significant_height - - integral_wrt_time_of_surface_downward_latent_heat_flux + + sea_surface_wind_wave_significant_height - - integral_wrt_time_of_air_temperature_excess + + sea_surface_swell_wave_significant_height - - integral_wrt_time_of_air_temperature_deficit + + tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - angstrom_exponent_of_ambient_aerosol_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - atmosphere_convective_available_potential_energy + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - atmosphere_convective_available_potential_energy + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air - - mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - lagrangian_tendency_of_air_pressure + + number_concentration_of_ambient_aerosol_particles_in_air - - lagrangian_tendency_of_air_pressure + + mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_height_above_geoid + + mole_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_height_above_geoid + + mass_fraction_of_water_in_ambient_aerosol_particles_in_air - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - sea_surface_height_above_mean_sea_level + + mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_height_above_mean_sea_level + + mass_fraction_of_nitrate_dry_aerosol_particles_in_air - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_dust_dry_aerosol_particles_in_air - - 
surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid + + mass_concentration_of_water_in_ambient_aerosol_particles_in_air - - sea_floor_depth_below_geoid + + mass_concentration_of_sulfate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission + + mass_concentration_of_nitrate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition + + mass_concentration_of_mercury_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition + + atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles - - surface_geostrophic_northward_sea_water_velocity + + mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air - - surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid + + mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - air_pressure_at_mean_sea_level + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - sea_floor_depth_below_mean_sea_level + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + mass_concentration_of_ammonium_dry_aerosol_particles_in_air - - sea_surface_wind_wave_mean_period + + mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - sea_surface_wave_mean_period + + mass_concentration_of_dust_dry_aerosol_particles_in_air - - sea_surface_swell_wave_mean_period + + atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - sea_surface_wind_wave_to_direction + + atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles - - sea_surface_swell_wave_to_direction + + atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles - - mass_content_of_water_in_soil_layer + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles - - mass_content_of_water_in_soil + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles - - sea_surface_wind_wave_significant_height + + atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - sea_surface_swell_wave_significant_height + + atmosphere_mass_content_of_water_in_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + 
atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles - - mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + atmosphere_mass_content_of_nitric_acid_trihydrate_ambient_aerosol_particles - - atmosphere_mass_content_of_water_in_ambient_aerosol_particles + + atmosphere_mass_content_of_nitrate_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion + + atmosphere_mass_content_of_mercury_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_mass_content_of_dust_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_mass_content_of_ammonium_dry_aerosol_particles - - mass_fraction_of_nitrate_dry_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles - - mass_concentration_of_sulfate_dry_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - mass_fraction_of_water_in_ambient_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles - - mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + angstrom_exponent_of_ambient_aerosol_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion + + atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - mass_concentration_of_dust_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_gravitational_settling - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition @@ -33188,528 +33366,532 @@ mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air - - number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + 
+ atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles - - sea_surface_wave_significant_height + + atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots - - number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves - - number_concentration_of_ambient_aerosol_particles_in_air + + net_primary_productivity_of_biomass_expressed_as_carbon - - mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + gross_primary_productivity_of_biomass_expressed_as_carbon - - mass_fraction_of_dust_dry_aerosol_particles_in_air + + atmosphere_convective_available_potential_energy - - mass_concentration_of_water_in_ambient_aerosol_particles_in_air + + atmosphere_convective_available_potential_energy - - mass_concentration_of_nitrate_dry_aerosol_particles_in_air + + mass_concentration_of_chlorophyll_in_sea_water - - mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_concentration_of_chlorophyll_in_sea_water - - mass_concentration_of_ammonium_dry_aerosol_particles_in_air + + omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + isotropic_radiance_per_unit_wavelength_in_air - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + isotropic_radiance_per_unit_wavelength_in_air - - atmosphere_mass_content_of_dust_dry_aerosol_particles + + land_ice_lwe_surface_specific_mass_balance_rate - - atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles + + land_ice_surface_specific_mass_balance_rate - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence + + equivalent_thickness_at_stp_of_atmosphere_ozone_content - - surface_upward_mole_flux_of_carbon_dioxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - surface_downward_mole_flux_of_carbon_dioxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - atmosphere_mass_content_of_cloud_condensed_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - northward_water_vapor_flux_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - lwe_stratiform_snowfall_rate + + 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - stratiform_snowfall_amount + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - stratiform_rainfall_rate + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - stratiform_rainfall_flux + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - stratiform_rainfall_amount + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - tendency_of_sea_surface_height_above_mean_sea_level + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - surface_geostrophic_eastward_sea_water_velocity + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + atmosphere_net_upward_convective_mass_flux - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_molecular_hydrogen - - mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_methyl_chloride - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + tendency_of_troposphere_moles_of_methyl_bromide - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_methane - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_carbon_monoxide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_middle_atmosphere_moles_of_methyl_chloride - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_middle_atmosphere_moles_of_methyl_bromide - - 
tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_middle_atmosphere_moles_of_methane - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_middle_atmosphere_moles_of_carbon_monoxide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_moles_of_nitrous_oxide - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_moles_of_molecular_hydrogen - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + tendency_of_atmosphere_moles_of_methyl_chloride - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_moles_of_methyl_bromide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + y_wind - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + x_wind - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + sea_water_y_velocity - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + sea_water_x_velocity - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + atmosphere_moles_of_nitrous_oxide - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + atmosphere_moles_of_molecular_hydrogen - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + atmosphere_moles_of_methyl_chloride - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + atmosphere_moles_of_methyl_bromide - - surface_upward_sensible_heat_flux + + atmosphere_moles_of_methane - - surface_temperature + + atmosphere_moles_of_carbon_monoxide - - surface_temperature + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - surface_temperature + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - surface_net_downward_radiative_flux + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence - - wind_mixing_energy_flux_into_sea_water + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection - - water_flux_into_sea_water + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_deep_convection - - upward_eliassen_palm_flux_in_air + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_convection - - upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + tendency_of_atmosphere_mass_content_of_water_vapor - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + tendency_of_atmosphere_mass_content_of_water_due_to_advection - - specific_gravitational_potential_energy + + mass_content_of_water_vapor_in_atmosphere_layer - - product_of_northward_wind_and_specific_humidity + + 
mass_content_of_water_in_atmosphere_layer - - mole_fraction_of_ozone_in_air + + mass_content_of_cloud_ice_in_atmosphere_layer - - isotropic_shortwave_radiance_in_air + + mass_content_of_cloud_condensed_water_in_atmosphere_layer - - isotropic_longwave_radiance_in_air + + lwe_thickness_of_atmosphere_mass_content_of_water_vapor - - mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - atmosphere_mass_content_of_ammonium_dry_aerosol_particles + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - stratiform_snowfall_flux + + atmosphere_mass_content_of_sulfate - - thickness_of_stratiform_rainfall_amount + + atmosphere_mass_content_of_sulfate - - sea_surface_wind_wave_period + + surface_upward_mole_flux_of_carbon_dioxide - - omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water + + surface_downward_mole_flux_of_carbon_dioxide - - tendency_of_middle_atmosphere_moles_of_molecular_hydrogen + + atmosphere_mass_content_of_water_vapor - - tendency_of_middle_atmosphere_moles_of_methyl_chloride + + atmosphere_mass_content_of_convective_cloud_condensed_water - - tendency_of_middle_atmosphere_moles_of_methane + + atmosphere_mass_content_of_cloud_ice - - sea_water_y_velocity + + atmosphere_mass_content_of_cloud_condensed_water - - sea_water_x_velocity + + thickness_of_stratiform_snowfall_amount - - mole_fraction_of_hypochlorous_acid_in_air + + thickness_of_stratiform_rainfall_amount - - tendency_of_troposphere_moles_of_molecular_hydrogen + + stratiform_snowfall_flux - - tendency_of_troposphere_moles_of_methyl_chloride + + stratiform_snowfall_amount - - mass_content_of_water_vapor_in_atmosphere_layer + + stratiform_rainfall_rate - - mass_content_of_water_in_atmosphere_layer + + stratiform_rainfall_flux - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence + + stratiform_rainfall_amount - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection + + northward_water_vapor_flux_in_air - - tendency_of_troposphere_moles_of_methyl_bromide + + lwe_thickness_of_stratiform_snowfall_amount - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection + + lwe_stratiform_snowfall_rate - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection + + kinetic_energy_dissipation_in_atmosphere_boundary_layer - - radiation_wavelength + + eastward_water_vapor_flux_in_air - - tendency_of_troposphere_moles_of_methane + + surface_upward_sensible_heat_flux - - tendency_of_atmosphere_mass_content_of_water_due_to_advection + + surface_temperature - - mole_fraction_of_chlorine_monoxide_in_air + + surface_temperature - - mole_fraction_of_chlorine_dioxide_in_air + + surface_temperature - - mass_fraction_of_ozone_in_air + + surface_net_downward_radiative_flux - - mass_fraction_of_convective_cloud_condensed_water_in_air + + mole_fraction_of_hypochlorous_acid_in_air - - sea_surface_swell_wave_period + + mole_fraction_of_chlorine_monoxide_in_air - - surface_drag_coefficient_in_air + + mole_fraction_of_chlorine_dioxide_in_air - - mass_content_of_cloud_condensed_water_in_atmosphere_layer + + wind_mixing_energy_flux_into_sea_water - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + water_flux_into_sea_water - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves - - y_wind + + 
upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves - - kinetic_energy_dissipation_in_atmosphere_boundary_layer + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - mass_concentration_of_suspended_matter_in_sea_water + + upward_eliassen_palm_flux_in_air - - x_wind + + northward_heat_flux_in_air_due_to_eddy_advection - - isotropic_radiance_per_unit_wavelength_in_air + + northward_eliassen_palm_flux_in_air - - isotropic_radiance_per_unit_wavelength_in_air + + wave_frequency - - atmosphere_moles_of_nitrous_oxide + + sea_surface_wind_wave_period - - atmosphere_moles_of_molecular_hydrogen + + sea_surface_swell_wave_period - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots + + mass_concentration_of_suspended_matter_in_sea_water - - atmosphere_moles_of_methyl_chloride + + surface_drag_coefficient_in_air - - land_ice_surface_specific_mass_balance_rate + + surface_drag_coefficient_for_momentum_in_air - - land_ice_lwe_surface_specific_mass_balance_rate + + surface_drag_coefficient_for_heat_in_air - - tendency_of_atmosphere_moles_of_molecular_hydrogen + + specific_gravitational_potential_energy - - atmosphere_moles_of_carbon_monoxide + + radiation_wavelength - - tendency_of_atmosphere_moles_of_methyl_chloride + + product_of_northward_wind_and_specific_humidity - - surface_drag_coefficient_for_momentum_in_air + + mole_fraction_of_ozone_in_air - - surface_drag_coefficient_for_heat_in_air + + isotropic_shortwave_radiance_in_air - - leaf_mass_content_of_carbon + + isotropic_longwave_radiance_in_air - - mass_concentration_of_chlorophyll_in_sea_water + + mass_fraction_of_ozone_in_air - - mass_concentration_of_chlorophyll_in_sea_water + + mass_fraction_of_convective_cloud_condensed_water_in_air diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 26f03c0566..b944f9b22f 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -44,6 +44,10 @@ standard library function :func:`os.path.expanduser` and module :mod:`fnmatch` for more details. + .. warning:: + + If supplying a URL, only OPeNDAP Data Sources are supported. + * constraints: Either a single constraint, or an iterable of constraints. Each constraint can be either a string, an instance of @@ -93,19 +97,23 @@ def callback(cube, field, filename): import threading import iris._constraints -from iris._deprecation import IrisDeprecation, warn_deprecated import iris.config import iris.io +from ._deprecation import IrisDeprecation, warn_deprecated + +try: + from ._version import version as __version__ # noqa: F401 +except ModuleNotFoundError: + __version__ = "unknown" + + try: import iris_sample_data except ImportError: iris_sample_data = None -# Iris revision. -__version__ = "3.2.dev0" - # Restrict the names imported when using "from iris import *" __all__ = [ "AttributeConstraint", @@ -132,7 +140,7 @@ def callback(cube, field, filename): class Future(threading.local): """Run-time configuration controller.""" - def __init__(self): + def __init__(self, datum_support=False): """ A container for run-time options controls. @@ -147,22 +155,24 @@ def __init__(self): .. note:: iris.FUTURE.example_future_flag does not exist. It is provided - as an example because there are currently no flags in - iris.Future. + as an example. """ - # The flag 'example_future_flag' is provided as a future reference - # for the structure of this class. + # The flag 'example_future_flag' is provided as a reference for the + # structure of this class. 
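The new flag slots into the existing `Future` machinery, so it can be set globally or scoped with the `context` manager shown above. A minimal sketch of intended use (the data file name is hypothetical):

```python
import iris

# Enable the new behaviour for the whole session ...
iris.FUTURE.datum_support = True

# ... or only within a limited scope, using the existing context manager.
with iris.FUTURE.context(datum_support=True):
    cube = iris.load_cube("my_data.nc")  # hypothetical input file
```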
diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py
index 038f9d9337..ac7ae34511 100644
--- a/lib/iris/_lazy_data.py
+++ b/lib/iris/_lazy_data.py
@@ -10,7 +10,7 @@
 
 """
 
-from functools import wraps
+from functools import lru_cache, wraps
 
 import dask
 import dask.array as da
@@ -47,7 +47,14 @@ def is_lazy_data(data):
     return result
 
 
-def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")):
+@lru_cache
+def _optimum_chunksize_internals(
+    chunks,
+    shape,
+    limit=None,
+    dtype=np.dtype("f4"),
+    dask_array_chunksize=dask.config.get("array.chunk-size"),
+):
     """
     Reduce or increase an initial chunk shape to get close to a chosen ideal
     size, while prioritising the splitting of the earlier (outer) dimensions
@@ -86,7 +93,7 @@ def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")):
     # Set the chunksize limit.
     if limit is None:
         # Fetch the default 'optimal' chunksize from the dask config.
-        limit = dask.config.get("array.chunk-size")
+        limit = dask_array_chunksize
 
     # Convert to bytes
     limit = dask.utils.parse_bytes(limit)
@@ -146,6 +153,25 @@ def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")):
     return tuple(result)
 
 
+@wraps(_optimum_chunksize_internals)
+def _optimum_chunksize(
+    chunks,
+    shape,
+    limit=None,
+    dtype=np.dtype("f4"),
+):
+    # By providing dask_array_chunksize as an argument, we make it so that the
+    # output of _optimum_chunksize_internals depends only on its arguments
+    # (and thus we can use lru_cache)
+    return _optimum_chunksize_internals(
+        tuple(chunks),
+        tuple(shape),
+        limit=limit,
+        dtype=dtype,
+        dask_array_chunksize=dask.config.get("array.chunk-size"),
+    )
+
+
 def as_lazy_data(data, chunks=None, asarray=False):
     """
     Convert the input array `data` to a dask array.
@@ -359,7 +385,7 @@ def map_complete_blocks(src, func, dims, out_sizes):
 
     Args:
 
-    * src (:class:`~iris.cube.Cube`):
+    * src (:class:`~iris.cube.Cube` or array-like):
         Source cube that function is applied to.
     * func:
         Function to apply.
@@ -369,10 +395,15 @@ def map_complete_blocks(src, func, dims, out_sizes):
         Output size of dimensions that cannot be chunked.
 
     """
-    if not src.has_lazy_data():
+    if is_lazy_data(src):
+        data = src
+    elif not hasattr(src, "has_lazy_data"):
+        # Not a lazy array and not a cube. So treat as ordinary numpy array.
+        return func(src)
+    elif not src.has_lazy_data():
         return func(src.data)
-
-    data = src.lazy_data()
+    else:
+        data = src.lazy_data()
 
     # Ensure dims are not chunked
     in_chunks = list(data.chunks)
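The split between `_optimum_chunksize` and `_optimum_chunksize_internals` exists because `functools.lru_cache` needs hashable arguments and a function whose output depends only on those arguments. The wrapper therefore converts `chunks` and `shape` to tuples, and reads the (mutable) dask config at call time so that a config change cannot serve a stale cached result. A minimal sketch of the same pattern, under those assumptions:

```python
from functools import lru_cache

import dask


@lru_cache
def limit_in_bytes(dask_array_chunksize):
    # Safe to memoise: the config value is an explicit, hashable argument,
    # so changing the dask config later cannot return a stale answer.
    return dask.utils.parse_bytes(dask_array_chunksize)


# Callers read the current config value at call time:
print(limit_in_bytes(dask.config.get("array.chunk-size")))  # e.g. 134217728
```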
diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py
index 81d46bb29f..ea32fc5126 100644
--- a/lib/iris/_representation/cube_printout.py
+++ b/lib/iris/_representation/cube_printout.py
@@ -252,15 +252,18 @@ def add_scalar_row(name, value=""):
             # Add a row for each item
             # NOTE: different section types need different handling
             title = sect_name.lower()
-            if "scalar coordinate" in title:
+            if title == "scalar coordinates:":
                 for item in sect.contents:
                     add_scalar_row(item.name, item.content)
                     if item.extra:
                         add_scalar_row(item_to_extra_indent + item.extra)
-            elif "attribute" in title or "cell method" in title:
+            elif title in ("attributes:", "cell methods:", "mesh:"):
                 for title, value in zip(sect.names, sect.values):
                     add_scalar_row(title, value)
-            elif "scalar cell measure" in title:
+            elif title in (
+                "scalar ancillary variables:",
+                "scalar cell measures:",
+            ):
                 # These are just strings: nothing in the 'value' column.
                 for name in sect.contents:
                     add_scalar_row(name)
diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py
index 1e78a92fd1..6b0d4cf0f3 100644
--- a/lib/iris/_representation/cube_summary.py
+++ b/lib/iris/_representation/cube_summary.py
@@ -48,11 +48,25 @@ def __init__(self, cube, name_padding=35):
         self.dimension_header = DimensionHeader(cube)
 
 
-def string_repr(text, quote_strings=False):
+def string_repr(text, quote_strings=False, clip_strings=False):
     """Produce a one-line printable form of a text string."""
-    if re.findall("[\n\t]", text) or quote_strings:
+    force_quoted = re.findall("[\n\t]", text) or quote_strings
+    if force_quoted:
         # Replace the string with its repr (including quotes).
         text = repr(text)
+    if clip_strings:
+        # First check for quotes.
+        # N.B. not just 'quote_strings', but also array values-as-strings
+        has_quotes = text[0] in "\"'"
+        if has_quotes:
+            # Strip off (and store) any outer quotes before clipping.
+            pre_quote, post_quote = text[0], text[-1]
+            text = text[1:-1]
+        # clipping : use 'rider' with extra space in case it ends in a '.'
+        text = iris.util.clip_string(text, rider=" ...")
+        if has_quotes:
+            # Replace in original quotes
+            text = pre_quote + text + post_quote
     return text
 
 
@@ -62,17 +76,20 @@ def array_repr(arr):
     text = repr(arr)
     # ..then reduce any multiple spaces and newlines.
     text = re.sub("[ \t\n]+", " ", text)
+    text = string_repr(text, quote_strings=False, clip_strings=True)
     return text
 
 
-def value_repr(value, quote_strings=False):
+def value_repr(value, quote_strings=False, clip_strings=False):
     """
     Produce a single-line printable version of an attribute or scalar value.
 
     """
     if hasattr(value, "dtype"):
         value = array_repr(value)
     elif isinstance(value, str):
-        value = string_repr(value, quote_strings=quote_strings)
+        value = string_repr(
+            value, quote_strings=quote_strings, clip_strings=clip_strings
+        )
     value = str(value)
     return value
 
@@ -132,7 +149,7 @@ def __init__(self, cube, vector, iscoord):
             self.extra = ""
 
 
-class ScalarSummary(CoordSummary):
+class ScalarCoordSummary(CoordSummary):
     def __init__(self, cube, coord):
         self.name = coord.name()
         if (
@@ -188,10 +205,12 @@ def __init__(self, title, cube, vectors, iscoord):
         ]
 
 
-class ScalarSection(Section):
+class ScalarCoordSection(Section):
     def __init__(self, title, cube, scalars):
         self.title = title
-        self.contents = [ScalarSummary(cube, scalar) for scalar in scalars]
+        self.contents = [
+            ScalarCoordSummary(cube, scalar) for scalar in scalars
+        ]
 
 
 class ScalarCellMeasureSection(Section):
@@ -200,6 +219,12 @@ def __init__(self, title, cell_measures):
         self.contents = [cm.name() for cm in cell_measures]
 
 
+class ScalarAncillaryVariableSection(Section):
+    def __init__(self, title, ancillary_variables):
+        self.title = title
+        self.contents = [av.name() for av in ancillary_variables]
+
+
 class AttributeSection(Section):
     def __init__(self, title, attributes):
         self.title = title
@@ -207,14 +232,32 @@ def __init__(self, title, attributes):
         self.values = []
         self.contents = []
         for name, value in sorted(attributes.items()):
-            value = value_repr(value, quote_strings=True)
-            value = iris.util.clip_string(value)
+            value = value_repr(value, quote_strings=True, clip_strings=True)
             self.names.append(name)
             self.values.append(value)
             content = "{}: {}".format(name, value)
             self.contents.append(content)
 
 
+class ScalarMeshSection(AttributeSection):
+    # This happens to behave just like an attribute section, but it
+    # initialises directly from the cube.
+    def __init__(self, title, cube):
+        self.title = title
+        self.names = []
+        self.values = []
+        self.contents = []
+        if cube.mesh is not None:
+            self.names.extend(["name", "location"])
+            self.values.extend([cube.mesh.name(), cube.location])
+            self.contents.extend(
+                [
+                    "{}: {}".format(name, value)
+                    for name, value in zip(self.names, self.values)
+                ]
+            )
+
+
 class CellMethodSection(Section):
     def __init__(self, title, cell_methods):
         self.title = title
@@ -237,7 +280,7 @@ class CubeSummary:
 
     """
 
-    def __init__(self, cube, shorten=False, name_padding=35):
+    def __init__(self, cube, name_padding=35):
         self.header = FullHeader(cube, name_padding)
 
         # Cache the derived coords so we can rely on consistent
@@ -277,13 +320,23 @@ def __init__(self, cube, shorten=False, name_padding=35):
             if id(coord) not in scalar_coord_ids
         ]
 
-        # cell measures
-        vector_cell_measures = [
-            cm for cm in cube.cell_measures() if cm.shape != (1,)
-        ]
-
         # Ancillary Variables
-        vector_ancillary_variables = [av for av in cube.ancillary_variables()]
+        vector_ancillary_variables = []
+        scalar_ancillary_variables = []
+        for av, av_dims in cube._ancillary_variables_and_dims:
+            if av_dims:
+                vector_ancillary_variables.append(av)
+            else:
+                scalar_ancillary_variables.append(av)
+
+        # Cell Measures
+        vector_cell_measures = []
+        scalar_cell_measures = []
+        for cm, cm_dims in cube._cell_measures_and_dims:
+            if cm_dims:
+                vector_cell_measures.append(cm)
+            else:
+                scalar_cell_measures.append(cm)
 
         # Sort scalar coordinates by name.
         scalar_coords.sort(key=lambda coord: coord.name())
@@ -297,9 +350,6 @@ def __init__(self, cube, shorten=False, name_padding=35):
         vector_derived_coords.sort(
            key=lambda coord: (cube.coord_dims(coord), coord.name())
         )
-        scalar_cell_measures = [
-            cm for cm in cube.cell_measures() if cm.shape == (1,)
-        ]
 
         self.vector_sections = {}
 
@@ -322,14 +372,21 @@ def add_vector_section(title, contents, iscoord=True):
         def add_scalar_section(section_class, title, *args):
             self.scalar_sections[title] = section_class(title, *args)
 
+        add_scalar_section(ScalarMeshSection, "Mesh:", cube)
+
         add_scalar_section(
-            ScalarSection, "Scalar coordinates:", cube, scalar_coords
+            ScalarCoordSection, "Scalar coordinates:", cube, scalar_coords
        )
 
         add_scalar_section(
             ScalarCellMeasureSection,
             "Scalar cell measures:",
             scalar_cell_measures,
         )
+        add_scalar_section(
+            ScalarAncillaryVariableSection,
+            "Scalar ancillary variables:",
+            scalar_ancillary_variables,
+        )
 
         add_scalar_section(
             CellMethodSection, "Cell methods:", cube.cell_methods
         )
diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py
index 465a521065..11810f2901 100644
--- a/lib/iris/analysis/__init__.py
+++ b/lib/iris/analysis/__init__.py
@@ -37,7 +39,9 @@
 
 from collections import OrderedDict
 from collections.abc import Iterable
+import functools
 from functools import wraps
+import warnings
 
 import dask.array as da
 import numpy as np
@@ -63,6 +65,7 @@
     "HMEAN",
     "Linear",
     "MAX",
+    "MAX_RUN",
     "MEAN",
     "MEDIAN",
     "MIN",
@@ -79,6 +82,7 @@
     "WPERCENTILE",
     "WeightedAggregator",
     "clear_phenomenon_identity",
+    "create_weighted_aggregator_fn",
 )
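`MAX_RUN` is newly exported here. A sketch of the kind of use it is intended for, based on the run-length aggregation defined later in this patch (the file name and threshold are hypothetical):

```python
import iris
from iris.analysis import MAX_RUN

cube = iris.load_cube("daily_rainfall.nc")  # hypothetical daily-rainfall cube

# Longest consecutive run of wet days (> 1 mm) along the time axis.
wet_spell = cube.collapsed("time", MAX_RUN, function=lambda data: data > 1.0)
```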
@@ -582,9 +586,19 @@ def aggregate(self, data, axis, **kwargs):
         mdtol = kwargs.pop("mdtol", None)
 
         result = self.call_func(data, axis=axis, **kwargs)
-        if mdtol is not None and ma.isMaskedArray(data):
+        if (
+            mdtol is not None
+            and ma.is_masked(data)
+            and result is not ma.masked
+        ):
             fraction_not_missing = data.count(axis=axis) / data.shape[axis]
-            mask_update = 1 - mdtol > fraction_not_missing
+            mask_update = np.array(1 - mdtol > fraction_not_missing)
+            if np.array(result).ndim > mask_update.ndim:
+                # call_func created trailing dimension.
+                mask_update = np.broadcast_to(
+                    mask_update.reshape(mask_update.shape + (1,)),
+                    np.array(result).shape,
+                )
             if ma.isMaskedArray(result):
                 result.mask = result.mask | mask_update
             else:
@@ -679,7 +693,7 @@ class PercentileAggregator(_Aggregator):
 
     """
 
-    def __init__(self, units_func=None, lazy_func=None, **kwargs):
+    def __init__(self, units_func=None, **kwargs):
         """
         Create a percentile aggregator.
 
@@ -692,11 +706,6 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs):
             Returns an :class:`cf_units.Unit`, or a value that can be made
             into one.
 
-        * lazy_func (callable or None):
-            An alternative to :data:`call_func` implementing a lazy
-            aggregation. Note that, it need not support all features of the
-            main operation, but should raise an error in unhandled cases.
-
         Additional kwargs::
             Passed through to :data:`call_func` and :data:`lazy_func`.
 
@@ -714,10 +723,29 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs):
             None,
             _percentile,
             units_func=units_func,
-            lazy_func=lazy_func,
+            lazy_func=_build_dask_mdtol_function(_percentile),
             **kwargs,
         )
 
+    def _base_aggregate(self, data, axis, lazy, **kwargs):
+        """
+        Method to avoid duplication of checks in aggregate and lazy_aggregate.
+        """
+        msg = "{} aggregator requires the mandatory keyword argument {!r}."
+        for arg in self._args:
+            if arg not in kwargs:
+                raise ValueError(msg.format(self.name(), arg))
+
+        if kwargs.get("fast_percentile_method", False) and (
+            kwargs.get("mdtol", 1) != 0
+        ):
+            kwargs["error_on_masked"] = True
+
+        if lazy:
+            return _Aggregator.lazy_aggregate(self, data, axis, **kwargs)
+        else:
+            return _Aggregator.aggregate(self, data, axis, **kwargs)
+
     def aggregate(self, data, axis, **kwargs):
         """
         Perform the percentile aggregation over the given data.
 
@@ -753,12 +781,41 @@ def aggregate(self, data, axis, **kwargs):
 
         """
-        msg = "{} aggregator requires the mandatory keyword argument {!r}."
-        for arg in self._args:
-            if arg not in kwargs:
-                raise ValueError(msg.format(self.name(), arg))
+        return self._base_aggregate(data, axis, lazy=False, **kwargs)
+
+    def lazy_aggregate(self, data, axis, **kwargs):
+        """
+        Perform aggregation over the data with a lazy operation, analogous to
+        the 'aggregate' result.
+
+        Keyword arguments are passed through to the data aggregation function
+        (for example, the "percent" keyword for a percentile aggregator).
+        This function is usually used in conjunction with update_metadata(),
+        which should be passed the same keyword arguments.
+
+        Args:
+
+        * data (array):
+            A lazy array (:class:`dask.array.Array`).
+
+        * axis (int or list of int):
+            The dimensions to aggregate over -- note that this is defined
+            differently to the 'aggregate' method 'axis' argument, which only
+            accepts a single dimension index.
+
+        Kwargs:
+
+        * kwargs:
+            All keyword arguments are passed through to the data aggregation
+            function.
 
-        return _Aggregator.aggregate(self, data, axis, **kwargs)
+        Returns:
+            A lazy array representing the result of the aggregation operation
+            (:class:`dask.array.Array`).
+
+        """
+
+        return self._base_aggregate(data, axis, lazy=True, **kwargs)
 
     def post_process(self, collapsed_cube, data_result, coords, **kwargs):
         """
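With `lazy_func` now wired up to a dask-aware `_percentile`, percentile collapses should be able to stay lazy end to end. A sketch of the intended effect (the input file name is hypothetical):

```python
import iris
from iris.analysis import PERCENTILE

cube = iris.load_cube("air_temp.nc")  # hypothetical lazily loaded cube

# The collapse can now defer the percentile arithmetic via dask.
result = cube.collapsed("time", PERCENTILE, percent=[25, 50, 75])
print(result.has_lazy_data())  # expected: True, until the data is touched
```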
@@ -1004,9 +1061,10 @@ def update_metadata(self, cube, coords, **kwargs):
                 coord_names.append(coord.name())
 
         # Add a cell method.
-        method_name = self.cell_method.format(**kwargs)
-        cell_method = iris.coords.CellMethod(method_name, coord_names)
-        cube.add_cell_method(cell_method)
+        if self.cell_method is not None:
+            method_name = self.cell_method.format(**kwargs)
+            cell_method = iris.coords.CellMethod(method_name, coord_names)
+            cube.add_cell_method(cell_method)
 
 
 class WeightedAggregator(Aggregator):
@@ -1115,6 +1173,43 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs):
         return result
 
 
+def create_weighted_aggregator_fn(aggregator_fn, axis, **kwargs):
+    """Return an aggregator function that can explicitly handle weights.
+
+    Args:
+
+    * aggregator_fn (callable):
+        An aggregator function, i.e., a callable that takes arguments
+        ``data``, ``axis`` and ``**kwargs`` and returns an array. Examples:
+        :meth:`Aggregator.aggregate`, :meth:`Aggregator.lazy_aggregate`.
+        This function should accept the keyword argument ``weights``.
+    * axis (int):
+        Axis to aggregate over. This argument is directly passed to
+        ``aggregator_fn``.
+
+    Kwargs:
+
+    * Arbitrary keyword arguments passed to ``aggregator_fn``. Should not
+      include ``weights`` (this will be removed if present).
+
+    Returns:
+        A function that takes two arguments ``data_arr`` and ``weights``
+        (both should be an array of the same shape) and returns an array.
+
+    """
+    kwargs_copy = dict(kwargs)
+    kwargs_copy.pop("weights", None)
+    aggregator_fn = functools.partial(aggregator_fn, axis=axis, **kwargs_copy)
+
+    def new_aggregator_fn(data_arr, weights):
+        """Weighted aggregation."""
+        if weights is None:
+            return aggregator_fn(data_arr)
+        return aggregator_fn(data_arr, weights=weights)
+
+    return new_aggregator_fn
+
+
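A sketch of how the new helper might be combined with an existing aggregator; the arrays are illustrative only:

```python
import numpy as np

from iris.analysis import MEAN, create_weighted_aggregator_fn

# Adapt MEAN.aggregate into a two-argument (data, weights) callable,
# pre-binding the axis and discarding any stray 'weights' kwarg.
agg_fn = create_weighted_aggregator_fn(MEAN.aggregate, axis=0)

data = np.arange(6.0).reshape(3, 2)
weights = np.array([[1.0, 1.0], [2.0, 2.0], [1.0, 1.0]])

print(agg_fn(data, None))     # plain mean over axis 0
print(agg_fn(data, weights))  # weighted mean over axis 0
```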
+ ) + if ma.is_masked(data): + raise TypeError(msg) + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + "Warning: 'partition' will ignore the 'mask' of the MaskedArray.", + ) + result = np.percentile(data, percent, axis=-1, **kwargs) + + result = result.T + else: + quantiles = percent / 100.0 + for key in ["alphap", "betap"]: + kwargs.setdefault(key, 1) + result = scipy.stats.mstats.mquantiles( + data, quantiles, axis=-1, **kwargs + ) + if not ma.isMaskedArray(data) and not ma.is_masked(result): + return np.asarray(result) + else: + return ma.MaskedArray(result) + + +@_axis_to_single_trailing +def _percentile(data, percent, fast_percentile_method=False, **kwargs): """ The percentile aggregator is an additive operation. This means that it *may* introduce a new dimension to the data for the statistic being @@ -1161,50 +1335,43 @@ def _percentile(data, axis, percent, fast_percentile_method=False, **kwargs): If a new additive dimension is formed, then it will always be the last dimension of the resulting percentile data payload. + Args: + + * data (array-like) + array from which percentiles are to be calculated + Kwargs: - * fast_percentile_method (boolean) : + * fast_percentile_method (boolean) When set to True, uses the numpy.percentiles method as a faster alternative to the scipy.mstats.mquantiles method. Does not handle masked arrays. + **kwargs : dict, optional + passed to scipy.stats.mstats.mquantiles if fast_percentile_method is + False. Otherwise passed to numpy.percentile. + """ - # Ensure that the target axis is the last dimension. - data = np.rollaxis(data, axis, start=data.ndim) - shape = data.shape[:-1] - # Flatten any leading dimensions. - if shape: - data = data.reshape([np.prod(shape), data.shape[-1]]) + if not isinstance(percent, Iterable): + percent = [percent] + percent = np.array(percent) + # Perform the percentile calculation. - if fast_percentile_method: - msg = "Cannot use fast np.percentile method with masked array." - if ma.is_masked(data): - raise TypeError(msg) - result = np.percentile(data, percent, axis=-1) - result = result.T - else: - quantiles = np.array(percent) / 100.0 - result = scipy.stats.mstats.mquantiles( - data, quantiles, axis=-1, **kwargs - ) - if not ma.isMaskedArray(data) and not ma.is_masked(result): - result = np.asarray(result) - else: - result = ma.MaskedArray(result) + _partial_percentile = functools.partial( + _calc_percentile, + percent=percent, + fast_percentile_method=fast_percentile_method, + **kwargs, + ) + + result = iris._lazy_data.map_complete_blocks( + data, _partial_percentile, (-1,), percent.shape + ) - # Ensure to unflatten any leading dimensions. - if shape: - if not isinstance(percent, Iterable): - percent = [percent] - percent = np.array(percent) - # Account for the additive dimension. - if percent.shape > (1,): - shape += percent.shape - result = result.reshape(shape) # Check whether to reduce to a scalar result, as per the behaviour # of other aggregators. - if result.shape == (1,) and quantiles.ndim == 0: - result = result[0] + if result.shape == (1,): + result = np.squeeze(result) return result @@ -1332,18 +1499,21 @@ def _weighted_percentile( return result -@_build_dask_mdtol_function -def _lazy_count(array, **kwargs): - array = iris._lazy_data.as_lazy_data(array) +def _count(array, **kwargs): + """ + Counts the number of points along the axis that satisfy the condition + specified by ``function``. Uses Dask's support for NEP13/18 to work as + either a lazy or a real function. 
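# Why the fast path above must reject masked data unless mdtol is zero:
# np.percentile does not honour the mask, whereas
# scipy.stats.mstats.mquantiles does. Illustrative comparison only.
import numpy as np
import numpy.ma as ma
import scipy.stats.mstats

data = ma.masked_array([1.0, 2.0, 3.0, 100.0], mask=[0, 0, 0, 1])
print(np.percentile(data, 50))  # 2.5 -- the masked 100.0 leaks in
print(scipy.stats.mstats.mquantiles(data, 0.5, alphap=1, betap=1))  # [2.0]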
+ + """ func = kwargs.pop("function", None) if not callable(func): emsg = "function must be a callable. Got {}." raise TypeError(emsg.format(type(func))) - return da.sum(func(array), **kwargs) + return np.sum(func(array), **kwargs) def _proportion(array, function, axis, **kwargs): - count = iris._lazy_data.non_lazy(_lazy_count) # if the incoming array is masked use that to count the total number of # values if ma.isMaskedArray(array): @@ -1354,7 +1524,7 @@ def _proportion(array, function, axis, **kwargs): # case pass the array shape instead of the mask: total_non_masked = array.shape[axis] else: - total_non_masked = count( + total_non_masked = _count( array.mask, axis=axis, function=np.logical_not, **kwargs ) total_non_masked = ma.masked_equal(total_non_masked, 0) @@ -1367,12 +1537,52 @@ def _proportion(array, function, axis, **kwargs): # a dtype for its data that is different to the dtype of the fill-value, # which can cause issues outside this function. # Reference - tests/unit/analyis/test_PROPORTION.py Test_masked.test_ma - numerator = count(array, axis=axis, function=function, **kwargs) + numerator = _count(array, axis=axis, function=function, **kwargs) result = ma.asarray(numerator / total_non_masked) return result +def _lazy_max_run(array, axis=-1, **kwargs): + """ + Lazily perform the calculation of maximum run lengths along the given axis + """ + array = iris._lazy_data.as_lazy_data(array) + func = kwargs.pop("function", None) + if not callable(func): + emsg = "function must be a callable. Got {}." + raise TypeError(emsg.format(type(func))) + bool_array = da.ma.getdata(func(array)) + bool_array = da.logical_and( + bool_array, da.logical_not(da.ma.getmaskarray(array)) + ) + padding = [(0, 0)] * array.ndim + padding[axis] = (0, 1) + ones_zeros = da.pad(bool_array, padding).astype(int) + cum_sum = da.cumsum(ones_zeros, axis=axis) + run_totals = da.where(ones_zeros == 0, cum_sum, 0) + stepped_run_lengths = da.reductions.cumreduction( + np.maximum.accumulate, + np.maximum, + np.NINF, + run_totals, + axis=axis, + dtype=cum_sum.dtype, + out=None, + method="sequential", + preop=None, + ) + run_lengths = da.diff(stepped_run_lengths, axis=axis) + result = da.max(run_lengths, axis=axis) + + # Check whether to reduce to a scalar result, as per the behaviour + # of other aggregators. + if result.shape == (1,): + result = da.squeeze(result) + + return result + + def _rms(array, axis, **kwargs): # XXX due to the current limitations in `da.average` (see below), maintain # an explicit non-lazy aggregation function for now. @@ -1394,26 +1604,36 @@ def _lazy_rms(array, axis, **kwargs): # all. Thus trying to use this aggregator with weights will currently # raise an error in dask due to the unexpected keyword `weights`, # rather than silently returning the wrong answer. - return da.sqrt(da.mean(array ** 2, axis=axis, **kwargs)) + return da.sqrt(da.mean(array**2, axis=axis, **kwargs)) -@_build_dask_mdtol_function -def _lazy_sum(array, **kwargs): - array = iris._lazy_data.as_lazy_data(array) - # weighted or scaled sum +def _sum(array, **kwargs): + """ + Weighted or scaled sum. Uses Dask's support for NEP13/18 to work as either + a lazy or a real function. 
+ + """ axis_in = kwargs.get("axis", None) weights_in = kwargs.pop("weights", None) returned_in = kwargs.pop("returned", False) if weights_in is not None: - wsum = da.sum(weights_in * array, **kwargs) + wsum = np.sum(weights_in * array, **kwargs) else: - wsum = da.sum(array, **kwargs) + wsum = np.sum(array, **kwargs) if returned_in: + al = da if iris._lazy_data.is_lazy_data(array) else np if weights_in is None: - weights = iris._lazy_data.as_lazy_data(np.ones_like(array)) + weights = al.ones_like(array) + if al is da: + # Dask version of ones_like does not preserve masks. See dask#9301. + weights = da.ma.masked_array( + weights, da.ma.getmaskarray(array) + ) else: - weights = weights_in - rvalue = (wsum, da.sum(weights, axis=axis_in)) + weights = al.ma.masked_array( + weights_in, mask=al.ma.getmaskarray(array) + ) + rvalue = (wsum, np.sum(weights, axis=axis_in)) else: rvalue = wsum return rvalue @@ -1533,9 +1753,9 @@ def interp_order(length): # COUNT = Aggregator( "count", - iris._lazy_data.non_lazy(_lazy_count), + _count, units_func=lambda units: 1, - lazy_func=_lazy_count, + lazy_func=_build_dask_mdtol_function(_count), ) """ An :class:`~iris.analysis.Aggregator` instance that counts the number @@ -1563,6 +1783,37 @@ def interp_order(length): """ +MAX_RUN = Aggregator( + None, + iris._lazy_data.non_lazy(_lazy_max_run), + units_func=lambda units: 1, + lazy_func=_build_dask_mdtol_function(_lazy_max_run), +) +""" +An :class:`~iris.analysis.Aggregator` instance that finds the longest run of +:class:`~iris.cube.Cube` data occurrences that satisfy a particular criterion, +as defined by a user supplied *function*, along the given axis. + +**Required** kwargs associated with the use of this aggregator: + +* function (callable): + A function which converts an array of data values into a corresponding array + of True/False values. + +**For example**: + +The longest run of days with precipitation exceeding 10 (in cube data units) at +each grid location could be calculated with:: + + result = precip_cube.collapsed('time', iris.analysis.MAX_RUN, + function=lambda values: values > 10) + +This aggregator handles masked data, which it treats as interrupting a run. + +""" +MAX_RUN.name = lambda: "max_run" + + GMEAN = Aggregator("geometric_mean", scipy.stats.mstats.gmean) """ An :class:`~iris.analysis.Aggregator` instance that calculates the @@ -1732,33 +1983,48 @@ def interp_order(length): """ -PERCENTILE = PercentileAggregator(alphap=1, betap=1) +PERCENTILE = PercentileAggregator() """ -An :class:`~iris.analysis.PercentileAggregator` instance that calculates the +A :class:`~iris.analysis.PercentileAggregator` instance that calculates the percentile over a :class:`~iris.cube.Cube`, as computed by -:func:`scipy.stats.mstats.mquantiles`. +:func:`scipy.stats.mstats.mquantiles` (default) or :func:`numpy.percentile` (if +``fast_percentile_method`` is True). -**Required** kwargs associated with the use of this aggregator: +Parameters +---------- -* percent (float or sequence of floats): +percent : float or sequence of floats Percentile rank/s at which to extract value/s. -Additional kwargs associated with the use of this aggregator: - -* alphap (float): +alphap : float, default=1 Plotting positions parameter, see :func:`scipy.stats.mstats.mquantiles`. - Defaults to 1. -* betap (float): +betap : float, default=1 Plotting positions parameter, see :func:`scipy.stats.mstats.mquantiles`. - Defaults to 1. 
+fast_percentile_method : bool, default=False + When set to True, uses :func:`numpy.percentile` method as a faster + alternative to the :func:`scipy.stats.mstats.mquantiles` method. An + exception is raised if the data are masked and the missing data tolerance + is not 0. -**For example**: +**kwargs : dict, optional + Passed to :func:`scipy.stats.mstats.mquantiles` or :func:`numpy.percentile`. + +Example +------- To compute the 10th and 90th percentile over *time*:: result = cube.collapsed('time', iris.analysis.PERCENTILE, percent=[10, 90]) -This aggregator handles masked data. +This aggregator handles masked data and lazy data. + +.. note:: + + Performance of this aggregator on lazy data is particularly sensitive to + the dask array chunking, so it may be useful to test with various chunk + sizes for a given application. Any chunking along the dimensions to be + aggregated is removed by the aggregator prior to calculating the + percentiles. """ @@ -1861,8 +2127,8 @@ def interp_order(length): SUM = WeightedAggregator( "sum", - iris._lazy_data.non_lazy(_lazy_sum), - lazy_func=_build_dask_mdtol_function(_lazy_sum), + _sum, + lazy_func=_build_dask_mdtol_function(_sum), ) """ An :class:`~iris.analysis.Aggregator` instance that calculates @@ -1985,7 +2251,9 @@ class _Groupby: """ - def __init__(self, groupby_coords, shared_coords=None): + def __init__( + self, groupby_coords, shared_coords=None, climatological=False + ): """ Determine the group slices over the group-by coordinates. @@ -2001,6 +2269,12 @@ def __init__(self, groupby_coords, shared_coords=None): that share the same group-by coordinate axis. The `int` identifies which dimension of the coord is on the group-by coordinate axis. + * climatological (bool): + Indicates whether the output is expected to be climatological. For + any aggregated time coord(s), this causes the climatological flag to + be set and the point for each cell to equal its first bound, thereby + preserving the time of year. + """ #: Group-by and shared coordinates that have been grouped. self.coords = [] @@ -2029,6 +2303,13 @@ def __init__(self, groupby_coords, shared_coords=None): for coord, dim in shared_coords: self._add_shared_coord(coord, dim) + # Aggregation is climatological in nature + self.climatological = climatological + + # Stores mapping from original cube coords to new ones, as metadata may + # not match + self.coord_replacement_mapping = [] + def _add_groupby_coord(self, coord): if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) @@ -2187,6 +2468,9 @@ def _compute_shared_coords(self): # Create new shared bounded coordinates. for coord, dim in self._shared_coords: + climatological_coord = ( + self.climatological and coord.units.is_time_reference() + ) if coord.points.dtype.kind in "SU": if coord.bounds is None: new_points = [] @@ -2225,6 +2509,7 @@ def _compute_shared_coords(self): maxmin_axis = (dim, -1) first_choices = coord.bounds.take(0, -1) last_choices = coord.bounds.take(1, -1) + else: # Derive new coord's bounds from points. item = coord.points @@ -2277,7 +2562,11 @@ def _compute_shared_coords(self): # Now create the new bounded group shared coordinate. 
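# Sketch of the climatological point convention introduced above: when the
# aggregation is climatological, each cell's point is taken from its first
# bound (preserving time of year) rather than the bound midpoint.
import numpy as np

new_bounds = np.array([[0.0, 8760.0], [8760.0, 17520.0]])  # hours
climatological = True
if climatological:
    new_points = new_bounds[..., 0]   # first bound: 0.0, 8760.0
else:
    new_points = new_bounds.mean(-1)  # midpoint: 4380.0, 13140.0
print(new_points)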
try: - new_points = new_bounds.mean(-1) + if climatological_coord: + # Use the first bound as the point + new_points = new_bounds[..., 0] + else: + new_points = new_bounds.mean(-1) except TypeError: msg = ( "The {0!r} coordinate on the collapsing dimension" @@ -2286,17 +2575,19 @@ def _compute_shared_coords(self): raise ValueError(msg) try: - self.coords.append( - coord.copy(points=new_points, bounds=new_bounds) - ) + new_coord = coord.copy(points=new_points, bounds=new_bounds) except ValueError: # non monotonic points/bounds - self.coords.append( - iris.coords.AuxCoord.from_coord(coord).copy( - points=new_points, bounds=new_bounds - ) + new_coord = iris.coords.AuxCoord.from_coord(coord).copy( + points=new_points, bounds=new_bounds ) + if climatological_coord: + new_coord.climatological = True + self.coord_replacement_mapping.append((coord, new_coord)) + + self.coords.append(new_coord) + def __len__(self): """Calculate the number of groups given the group-by coordinates.""" @@ -2421,6 +2712,10 @@ def interpolator(self, cube, coords): dimensions in the result cube caused by scalar values in `sample_points`. + The N arrays of values within `sample_points` will be used to + create an N-d grid of points that will then be sampled (rather than + just N points) + The values for coordinates that correspond to date/times may optionally be supplied as datetime.datetime or cftime.datetime instances. diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 127aec7c1e..0b52f54568 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -120,7 +120,7 @@ def _angle(p, q, r): mid_lons = np.deg2rad(q[0]) pr = _3d_xyz_from_latlon(r[0], r[1]) - _3d_xyz_from_latlon(p[0], p[1]) - pr_norm = np.sqrt(np.sum(pr ** 2, axis=0)) + pr_norm = np.sqrt(np.sum(pr**2, axis=0)) pr_top = pr[1] * np.cos(mid_lons) - pr[0] * np.sin(mid_lons) index = pr_norm == 0 diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index c6b33c56a4..fc64249729 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -229,7 +229,7 @@ def compute_interp_weights(self, xi, method=None): xi_shape, method, indices, norm_distances, out_of_bounds = prepared # Allocate arrays for describing the sparse matrix. - n_src_values_per_result_value = 2 ** ndim + n_src_values_per_result_value = 2**ndim n_result_values = len(indices[0]) n_non_zero = n_result_values * n_src_values_per_result_value weights = np.ones(n_non_zero, dtype=norm_distances[0].dtype) diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 409782f256..4630f47967 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -629,14 +629,10 @@ def curl(i_cube, j_cube, k_cube=None): # (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi) # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta) # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube) - if ( - y_coord.name() - not in [ - "latitude", - "grid_latitude", - ] - or x_coord.name() not in ["longitude", "grid_longitude"] - ): + if y_coord.name() not in [ + "latitude", + "grid_latitude", + ] or x_coord.name() not in ["longitude", "grid_longitude"]: raise ValueError( "Expecting latitude as the y coord and " "longitude as the x coord for spherical curl." 
diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 373487af53..44129ff175 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -335,7 +335,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): raise ValueError("Bounds must be [n,2] array") # fill in a new array of areas - radius_sqr = radius_of_earth ** 2 + radius_sqr = radius_of_earth**2 radian_lat_64 = radian_lat_bounds.astype(np.float64) radian_lon_64 = radian_lon_bounds.astype(np.float64) @@ -927,7 +927,7 @@ def _crs_distance_differentials(crs, x, y): """ # Make a true-latlon coordinate system for distance calculations. - crs_latlon = ccrs.Geodetic(globe=ccrs.Globe(ellipse="sphere")) + crs_latlon = ccrs.Geodetic(globe=crs.globe) # Transform points to true-latlon (just to get the true latitudes). _, true_lat = _transform_xy(crs, x, y, crs_latlon) # Get coordinate differentials w.r.t. true-latlon. @@ -1010,8 +1010,8 @@ def _transform_distance_vectors_tolerance_mask( # Squared magnitudes should be equal to one within acceptable tolerance. # A value of atol=2e-3 is used, which corresponds to a change in magnitude # of approximately 0.1%. - sqmag_1_0 = u_one_t ** 2 + v_zero_t ** 2 - sqmag_0_1 = u_zero_t ** 2 + v_one_t ** 2 + sqmag_1_0 = u_one_t**2 + v_zero_t**2 + sqmag_0_1 = u_zero_t**2 + v_one_t**2 mask = np.logical_not( np.logical_and( np.isclose(sqmag_1_0, ones, atol=2e-3), diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 107d964ed4..468847bca2 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -540,7 +540,7 @@ def power(data, out=None): return _math_op_common( cube, power, - cube.units ** exponent, + cube.units**exponent, new_dtype=new_dtype, in_place=in_place, ) @@ -774,6 +774,7 @@ def _binary_op_common( new_dtype=None, dim=None, in_place=False, + sanitise_metadata=True, ): """ Function which shares common code between binary operations. @@ -792,6 +793,8 @@ def _binary_op_common( coordinate that is not found in `cube` in_place - whether or not to apply the operation in place to `cube` and `cube.data` + sanitise_metadata - whether or not to remove metadata using + _sanitise_metadata function """ from iris.cube import Cube @@ -837,6 +840,20 @@ def unary_func(lhs): raise TypeError(emsg) return data + if in_place and not cube.has_lazy_data(): + # In-place arithmetic doesn't work if array type of LHS is less complex + # than RHS. + if iris._lazy_data.is_lazy_data(rhs): + cube.data = cube.lazy_data() + elif ma.is_masked(rhs) and not isinstance(cube.data, ma.MaskedArray): + cube.data = ma.array(cube.data) + + elif isinstance( + cube.core_data(), ma.MaskedArray + ) and iris._lazy_data.is_lazy_data(rhs): + # Workaround for #2987. numpy#15200 discusses the general problem. + cube = cube.copy(cube.lazy_data()) + result = _math_op_common( cube, unary_func, @@ -844,13 +861,15 @@ def unary_func(lhs): new_dtype=new_dtype, in_place=in_place, skeleton_cube=skeleton_cube, + sanitise_metadata=sanitise_metadata, ) if isinstance(other, Cube): # Insert the resultant data from the maths operation # within the resolved cube. 
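# Sketch of the in-place promotion logic added above: an in-place operation
# fails (or silently loses the mask) when the left-hand operand's array type
# is less capable than the right-hand one, so the LHS is promoted first.
import numpy as np
import numpy.ma as ma

lhs = np.array([1.0, 2.0, 3.0])
rhs = ma.masked_array([10.0, 10.0, 10.0], mask=[False, True, False])

lhs = ma.asarray(lhs)  # promote plain ndarray to a masked array first
lhs += rhs
print(lhs)  # [11.0 -- 13.0]: the mask survives the in-place operation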
result = resolver.cube(result.core_data(), in_place=in_place) - _sanitise_metadata(result, new_unit) + if sanitise_metadata: + _sanitise_metadata(result, new_unit) return result @@ -932,6 +951,7 @@ def _math_op_common( new_dtype=None, in_place=False, skeleton_cube=False, + sanitise_metadata=True, ): from iris.cube import Cube @@ -965,7 +985,8 @@ def _math_op_common( ): new_cube.data = ma.masked_array(0, 1, dtype=new_dtype) - _sanitise_metadata(new_cube, new_unit) + if sanitise_metadata: + _sanitise_metadata(new_cube, new_unit) return new_cube diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 89dde1818b..711e3c5bfb 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -168,10 +168,10 @@ def _ones_like(cube): covar = (s1 * s2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol ) - var_1 = (s1 ** 2).collapsed( + var_1 = (s1**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1 ) - var_2 = (s2 ** 2).collapsed( + var_2 = (s2**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_2 ) diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index d5fac9d108..946ae1cb2c 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -320,20 +320,59 @@ def interpolate(cube, sample_points, method=None): break if method in ["linear", None]: - for i in range(trajectory_size): - point = [(coord, values[i]) for coord, values in sample_points] - column = cube.interpolate(point, Linear()) - new_cube.data[..., i] = column.data - # Fill in the empty squashed (non derived) coords. - for column_coord in column.dim_coords + column.aux_coords: - src_dims = cube.coord_dims(column_coord) - if not squish_my_dims.isdisjoint(src_dims): - if len(column_coord.points) != 1: - msg = "Expected to find exactly one point. Found {}." - raise Exception(msg.format(column_coord.points)) - new_cube.coord(column_coord.name()).points[ - i - ] = column_coord.points[0] + # Using cube.interpolate will generate extra values that we don't need + # as it makes a grid from the provided coordinates (like a meshgrid) + # and then does interpolation for all of them. This is memory + # inefficient, but significantly more time efficient than calling + # cube.interpolate (or the underlying method on the interpolator) + # repeatedly, so using this approach for now. In future, it would be + # ideal if we only interpolated at the points we care about + columns = cube.interpolate(sample_points, Linear()) + # np.einsum(a, [0, 0], [0]) is like np.diag(a) + # We're using einsum here to do an n-dimensional diagonal, leaving the + # other dimensions unaffected and putting the diagonal's direction on + # the final axis + initial_inds = list(range(1, columns.ndim + 1)) + for ind in squish_my_dims: + initial_inds[ind] = 0 + final_inds = list(filter(lambda x: x != 0, initial_inds)) + [0] + new_cube.data = np.einsum(columns.data, initial_inds, final_inds) + + # Fill in the empty squashed (non derived) coords. 
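# Sketch of the einsum "n-dimensional diagonal" used above: repeating an
# index in einsum's sublist form extracts the diagonal over those axes,
# leaving the other dimensions untouched and placing the diagonal last.
import numpy as np

a = np.arange(9).reshape(3, 3)
print(np.array_equal(np.einsum(a, [0, 0], [0]), np.diag(a)))  # True

b = np.arange(18).reshape(2, 3, 3)
diag = np.einsum(b, [1, 0, 0], [1, 0])  # diagonal of the trailing axes
print(diag.shape)  # (2, 3)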
+ # We're using the same Einstein summation plan as for the cube, but + # redoing those indices to match the indices in the coordinates + for columns_coord in columns.dim_coords + columns.aux_coords: + src_dims = cube.coord_dims(columns_coord) + if not squish_my_dims.isdisjoint(src_dims): + # Mapping the cube indices onto the coord + initial_coord_inds = [initial_inds[ind] for ind in src_dims] + # Making the final ones the same way as for the cube + # 0 will always appear in the initial ones because we know this + # coord overlaps the squish dims + final_coord_inds = list( + filter(lambda x: x != 0, initial_coord_inds) + ) + [0] + new_coord_points = np.einsum( + columns_coord.points, initial_coord_inds, final_coord_inds + ) + # Check we're not overwriting coord.points with the wrong shape + if ( + not new_cube.coord(columns_coord.name()).points.shape + == new_coord_points.shape + ): + msg = ( + "Coord {} was expected to have new points of shape {}. " + "Found shape of {}." + ) + raise ValueError( + msg.format( + columns_coord.name(), + new_cube.coord(columns_coord.name()).points.shape, + new_coord_points.shape, + ) + ) + # Replace the points + new_cube.coord(columns_coord.name()).points = new_coord_points elif method == "nearest": # Use a cache with _nearest_neighbour_indices_ndcoords() diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index cb5f53f5f4..8ec39bb4b1 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -44,8 +44,6 @@ # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name -from ..util import guess_coord_axis _TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""") # Configure the logger. @@ -1413,6 +1411,8 @@ def metadata_filter( to only those that matched the given criteria. """ + from ..util import guess_coord_axis + name = None obj = None diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 12db64cafe..a0c97dfc00 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -13,7 +13,9 @@ from collections import namedtuple from collections.abc import Iterable +from dataclasses import dataclass import logging +from typing import Any from dask.array.core import broadcast_shapes import numpy as np @@ -56,10 +58,42 @@ _PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"]) -_PreparedItem = namedtuple( - "PreparedItem", - ["metadata", "points", "bounds", "dims", "container"], -) + +@dataclass +class _PreparedItem: + metadata: Any + points: Any + bounds: Any + dims: Any + container: Any + mesh: Any = None + location: Any = None + axis: Any = None + + def create_coord(self, metadata): + from iris.experimental.ugrid.mesh import MeshCoord + + if issubclass(self.container, MeshCoord): + # Make a MeshCoord, for which we have mesh/location/axis. + result = MeshCoord( + mesh=self.mesh, + location=self.location, + axis=self.axis, + ) + # Note: in this case we do also have "prepared metadata", but we + # do *not* assign it as we do for an 'ordinary' Coord. + # Instead, MeshCoord name/units/attributes are immutable, and set at + # create time to those of the underlying mesh node coordinate. + # cf https://github.com/SciTools/iris/issues/4670 + + else: + # Make a regular coord, for which we have points/bounds/metadata. + result = self.container(self.points, bounds=self.bounds) + # Also assign prepared metadata.
+ result.metadata = metadata + + return result + _PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"]) @@ -646,7 +680,13 @@ def _categorise_items(cube): @staticmethod def _create_prepared_item( - coord, dims, src_metadata=None, tgt_metadata=None + coord, + dims, + src_metadata=None, + tgt_metadata=None, + points=None, + bounds=None, + container=None, ): """ Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` @@ -658,8 +698,10 @@ def _create_prepared_item( * coord: The coordinate with the ``points`` and ``bounds`` to be extracted. - * dims: - The dimensions that the ``coord`` spans on the resulting resolved :class:`~iris.cube.Cube`. + * dims (int or tuple): + The dimensions that the ``coord`` spans on the resulting resolved + :class:`~iris.cube.Cube`. + (Can also be a single dimension number). * src_metadata: The coordinate metadata from the ``src`` :class:`~iris.cube.Cube`. @@ -667,26 +709,85 @@ def _create_prepared_item( * tgt_metadata: The coordinate metadata from the ``tgt`` :class:`~iris.cube.Cube`. + * points: + Override points array. When not given, use coord.points. + + * bounds: + Override bounds array. When not given, use coord.bounds. + + * container: + Override coord type (class constructor). + When not given, use type(coord). + Returns: The :class:`~iris.common.resolve._PreparedItem`. + .. note:: + + If container or type(coord) is DimCoord/AuxCoord (i.e. not + MeshCoord), then points+bounds define the built AuxCoord/DimCoord. + These points+bounds come either from those args, or the 'coord'. + Alternatively, when container or type(coord) is MeshCoord, then + points==bounds==None and the prepared item contains + mesh/location/axis properties for the resulting MeshCoord. + These don't have override args: they *always* come from 'coord'. + """ + if not isinstance(dims, Iterable): + dims = (dims,) + if src_metadata is not None and tgt_metadata is not None: combined = src_metadata.combine(tgt_metadata) else: combined = src_metadata or tgt_metadata - if not isinstance(dims, Iterable): - dims = (dims,) prepared_metadata = _PreparedMetadata( combined=combined, src=src_metadata, tgt=tgt_metadata ) - bounds = coord.bounds + + if container is None: + container = type(coord) + + from iris.experimental.ugrid.mesh import MeshCoord + + if issubclass(container, MeshCoord): + # Build a prepared-item to make a MeshCoord. + # This case does *NOT* use points + bounds, so alternatives to the + # coord content should not have been specified by the caller. + assert points is None and bounds is None + mesh = coord.mesh + location = coord.location + axis = coord.axis + + else: + # Build a prepared-item to make a DimCoord or AuxCoord. + + # mesh/location/axis are not used. + mesh = None + location = None + axis = None + + # points + bounds default to those from the coordinate, but + # alternative values may be specified. + if points is None: + points = coord.points + bounds = coord.bounds + # 'ELSE' points was passed: both points+bounds come from the args + + # Always *copy* points+bounds, to avoid any possible direct (shared) + # references to existing coord arrays.
+ points = points.copy() + if bounds is not None: + bounds = bounds.copy() + result = _PreparedItem( metadata=prepared_metadata, - points=coord.points.copy(), - bounds=bounds if bounds is None else bounds.copy(), dims=dims, - container=type(coord), + points=points, + bounds=bounds, + mesh=mesh, + location=location, + axis=axis, + container=container, ) return result @@ -1422,30 +1523,64 @@ def _prepare_common_aux_payload( (tgt_item,) = tgt_items src_coord = src_item.coord tgt_coord = tgt_item.coord - points, bounds = self._prepare_points_and_bounds( - src_coord, - tgt_coord, - src_item.dims, - tgt_item.dims, - ignore_mismatch=ignore_mismatch, - ) - if points is not None: - src_type = type(src_coord) - tgt_type = type(tgt_coord) - # Downcast to aux if there are mixed container types. - container = src_type if src_type is tgt_type else AuxCoord - prepared_metadata = _PreparedMetadata( - combined=src_metadata.combine(tgt_item.metadata), - src=src_metadata, - tgt=tgt_item.metadata, - ) - prepared_item = _PreparedItem( - metadata=prepared_metadata, - points=points.copy(), - bounds=bounds if bounds is None else bounds.copy(), - dims=tgt_item.dims, - container=container, + + prepared_item = None + src_is_mesh, tgt_is_mesh = [ + hasattr(coord, "mesh") for coord in (src_coord, tgt_coord) + ] + if src_is_mesh and tgt_is_mesh: + # MeshCoords are a bit "special" ... + # In this case, we may need to produce an alternative form + # to the 'ordinary' _PreparedItem + # However, this only works if they have identical meshes.. + if src_coord == tgt_coord: + prepared_item = self._create_prepared_item( + src_coord, + tgt_item.dims, + src_metadata=src_metadata, + tgt_metadata=tgt_item.metadata, + ) + else: + emsg = ( + f"Mesh coordinate {src_coord.name()!r} does not match between the " + f"LHS cube {self.lhs_cube.name()!r} and " + f"RHS cube {self.rhs_cube.name()!r}." + ) + raise ValueError(emsg) + + if prepared_item is None: + # Make a "normal" _PreparedItem, which is specified using + # points + bounds arrays. + # First, convert any un-matching MeshCoords to AuxCoord + if src_is_mesh: + src_coord = AuxCoord.from_coord(src_coord) + if tgt_is_mesh: + tgt_coord = AuxCoord.from_coord(tgt_coord) + points, bounds = self._prepare_points_and_bounds( + src_coord, + tgt_coord, + src_item.dims, + tgt_item.dims, + ignore_mismatch=ignore_mismatch, ) + if points is not None: + src_type = type(src_coord) + tgt_type = type(tgt_coord) + # Downcast to aux if there are mixed container types. 
+ container = ( + src_type if src_type is tgt_type else AuxCoord + ) + prepared_item = self._create_prepared_item( + src_coord, + tgt_item.dims, + src_metadata=src_metadata, + tgt_metadata=tgt_item.metadata, + points=points, + bounds=bounds, + container=container, + ) + + if prepared_item is not None: prepared_items.append(prepared_item) def _prepare_common_dim_payload( @@ -1499,16 +1634,13 @@ def _prepare_common_dim_payload( ) if points is not None: - prepared_metadata = _PreparedMetadata( - combined=src_metadata.combine(tgt_metadata), - src=src_metadata, - tgt=tgt_metadata, - ) - prepared_item = _PreparedItem( - metadata=prepared_metadata, - points=points.copy(), - bounds=bounds if bounds is None else bounds.copy(), - dims=(tgt_dim,), + prepared_item = self._create_prepared_item( + src_coord, + tgt_dim, + src_metadata=src_metadata, + tgt_metadata=tgt_metadata, + points=points, + bounds=bounds, container=DimCoord, ) self.prepared_category.items_dim.append(prepared_item) @@ -2333,8 +2465,7 @@ def cube(self, data, in_place=False): # Add the prepared dim coordinates. for item in self.prepared_category.items_dim: - coord = item.container(item.points, bounds=item.bounds) - coord.metadata = item.metadata.combined + coord = item.create_coord(metadata=item.metadata.combined) result.add_dim_coord(coord, item.dims) # Add the prepared aux and scalar coordinates. @@ -2343,8 +2474,8 @@ def cube(self, data, in_place=False): + self.prepared_category.items_scalar ) for item in prepared_aux_coords: - coord = item.container(item.points, bounds=item.bounds) - coord.metadata = item.metadata.combined + # These items are "special" + coord = item.create_coord(metadata=item.metadata.combined) try: result.add_aux_coord(coord, item.dims) except ValueError as err: diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 2f875bb159..802571925e 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -9,6 +9,7 @@ """ from abc import ABCMeta, abstractmethod +from functools import cached_property import warnings import cartopy.crs as ccrs @@ -53,10 +54,28 @@ class CoordSystem(metaclass=ABCMeta): grid_mapping_name = None def __eq__(self, other): - return ( - self.__class__ == other.__class__ - and self.__dict__ == other.__dict__ - ) + """ + Override equality + + The `_globe` and `_crs` attributes are not compared because they are + cached properties and completely derived from other attributes. The + nature of caching means that they can appear on one object and not on + another despite the objects being identical, and them being completely + derived from other attributes means they will only differ if other + attributes that are being tested for equality differ. + """ + if self.__class__ != other.__class__: + return False + self_keys = set(self.__dict__.keys()) + other_keys = set(other.__dict__.keys()) + check_keys = (self_keys | other_keys) - {"_globe", "_crs"} + for key in check_keys: + try: + if self.__dict__[key] != other.__dict__[key]: + return False + except KeyError: + return False + return True def __ne__(self, other): # Must supply __ne__, Python does not defer to __eq__ for @@ -122,11 +141,17 @@ def as_cartopy_projection(self): pass +_short_datum_names = { + "OSGB 1936": "OSGB36", + "OSGB_1936": "OSGB36", + "WGS 84": "WGS84", +} + + class GeogCS(CoordSystem): """ A geographic (ellipsoidal) coordinate system, defined by the shape of the Earth and a prime meridian. 
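# Standalone sketch of the __eq__ strategy introduced below: compare
# instance __dict__ contents while skipping keys that only hold cached
# derived values, and treat a key missing from either side as inequality.
def eq_ignoring_cached(d1, d2, cached=("_globe", "_crs")):
    keys = (set(d1) | set(d2)) - set(cached)
    return all(k in d1 and k in d2 and d1[k] == d2[k] for k in keys)

print(eq_ignoring_cached({"a": 1, "_crs": object()}, {"a": 1}))  # True
print(eq_ignoring_cached({"a": 1}, {"a": 2}))                    # False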
- """ grid_mapping_name = "latitude_longitude" @@ -139,34 +164,35 @@ def __init__( longitude_of_prime_meridian=None, ): """ - Creates a new GeogCS. - - Kwargs: + Create a new GeogCS. + Parameters + ---------- * semi_major_axis, semi_minor_axis: - Axes of ellipsoid, in metres. At least one must be given - (see note below). - + Axes of ellipsoid, in metres. At least one must be given (see note + below). * inverse_flattening: - Can be omitted if both axes given (see note below). - Defaults to 0.0 . - + Can be omitted if both axes given (see note below). Default 0.0 * longitude_of_prime_meridian: - Specifies the prime meridian on the ellipsoid, in degrees. - Defaults to 0.0 . + Specifies the prime meridian on the ellipsoid, in degrees. Default 0.0 + Notes + ----- If just semi_major_axis is set, with no semi_minor_axis or inverse_flattening, then a perfect sphere is created from the given radius. - If just two of semi_major_axis, semi_minor_axis, and - inverse_flattening are given the missing element is calculated from the - formula: + If just two of semi_major_axis, semi_minor_axis, and inverse_flattening + are given the missing element is calculated from the formula: :math:`flattening = (major - minor) / major` Currently, Iris will not allow over-specification (all three ellipsoid parameters). + After object creation, altering any of these properties will not update + the others. semi_major_axis and semi_minor_axis are used when creating + Cartopy objects. + Examples:: cs = GeogCS(6371229) @@ -233,13 +259,15 @@ def __init__( raise ValueError("Insufficient ellipsoid specification") #: Major radius of the ellipsoid in metres. - self.semi_major_axis = float(semi_major_axis) + self._semi_major_axis = float(semi_major_axis) #: Minor radius of the ellipsoid in metres. - self.semi_minor_axis = float(semi_minor_axis) + self._semi_minor_axis = float(semi_minor_axis) #: :math:`1/f` where :math:`f = (a-b)/a`. - self.inverse_flattening = float(inverse_flattening) + self._inverse_flattening = float(inverse_flattening) + + self._datum = None #: Describes 'zero' on the ellipsoid in degrees. self.longitude_of_prime_meridian = _arg_default( @@ -257,6 +285,14 @@ def _pretty_attrs(self): self.longitude_of_prime_meridian, ) ) + # An unknown crs datum will be treated as None + if self.datum is not None and self.datum != "unknown": + attrs.append( + ( + "datum", + self.datum, + ) + ) return attrs def __repr__(self): @@ -294,7 +330,7 @@ def xml_element(self, doc): return CoordSystem.xml_element(self, doc, attrs) def as_cartopy_crs(self): - return ccrs.Geodetic(self.as_cartopy_globe()) + return self._crs def as_cartopy_projection(self): return ccrs.PlateCarree( @@ -303,14 +339,161 @@ def as_cartopy_projection(self): ) def as_cartopy_globe(self): - # Explicitly set `ellipse` to None as a workaround for - # Cartopy setting WGS84 as the default. - return ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, + return self._globe + + @cached_property + def _globe(self): + """ + A representation of this CRS as a Cartopy Globe. + + Note + ---- + This property is created when required and then cached for speed. That + cached value is cleared when an assignment is made to a property of the + class that invalidates the cache. 
+ """ + if self._datum is not None: + short_datum = _short_datum_names.get(self._datum, self._datum) + # Cartopy doesn't actually enact datums unless they're provided without + # ellipsoid axes, so only provide the datum + return ccrs.Globe(short_datum, ellipse=None) + else: + return ccrs.Globe( + ellipse=None, + semimajor_axis=self._semi_major_axis, + semiminor_axis=self._semi_minor_axis, + ) + + @cached_property + def _crs(self): + """ + A representation of this CRS as a Cartopy CRS. + + Note + ---- + This property is created when required and then cached for speed. That + cached value is cleared when an assignment is made to a property of the + class that invalidates the cache. + """ + return ccrs.Geodetic(self._globe) + + def _wipe_cached_properties(self): + """ + Wipes the cached properties on the object as part of any update to a + value that invalidates the cache. + """ + try: + delattr(self, "_crs") + except AttributeError: + pass + try: + delattr(self, "_globe") + except AttributeError: + pass + + @property + def semi_major_axis(self): + if self._semi_major_axis is not None: + return self._semi_major_axis + else: + return self._crs.ellipsoid.semi_major_metre + + @semi_major_axis.setter + def semi_major_axis(self, value): + """ + Setting this property to a different value invalidates the current datum + (if any) because a datum encodes a specific semi-major axis. This also + invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ + value = float(value) + if not np.isclose(self.semi_major_axis, value): + self._datum = None + self._wipe_cached_properties() + self._semi_major_axis = value + + @property + def semi_minor_axis(self): + if self._semi_minor_axis is not None: + return self._semi_minor_axis + else: + return self._crs.ellipsoid.semi_minor_metre + + @semi_minor_axis.setter + def semi_minor_axis(self, value): + """ + Setting this property to a different value invalidates the current datum + (if any) because a datum encodes a specific semi-minor axis. This also + invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ + value = float(value) + if not np.isclose(self.semi_minor_axis, value): + self._datum = None + self._wipe_cached_properties() + self._semi_minor_axis = value + + @property + def inverse_flattening(self): + if self._inverse_flattening is not None: + return self._inverse_flattening + else: + self._crs.ellipsoid.inverse_flattening + + @inverse_flattening.setter + def inverse_flattening(self, value): + """ + Setting this property to a different value does not affect the behaviour + of this object any further than the value of this property. + """ + wmsg = ( + "Setting inverse_flattening does not affect other properties of " + "the GeogCS object. To change other properties set them explicitly" + " or create a new GeogCS instance." + ) + warnings.warn(wmsg, UserWarning) + value = float(value) + self._inverse_flattening = value + + @property + def datum(self): + if self._datum is None: + return None + else: + datum = self._datum + return datum + + @datum.setter + def datum(self, value): + """ + Setting this property to a different value invalidates the current + values of the ellipsoid measurements because a datum encodes its own + ellipse. This also invalidates the cached `cartopy.Globe` and + `cartopy.CRS`. 
+ """ + if self._datum != value: + self._semi_major_axis = None + self._semi_minor_axis = None + self._inverse_flattening = None + self._wipe_cached_properties() + self._datum = value + + @classmethod + def from_datum(cls, datum, longitude_of_prime_meridian=None): + + crs = super().__new__(cls) + + crs._semi_major_axis = None + crs._semi_minor_axis = None + crs._inverse_flattening = None + + #: Describes 'zero' on the ellipsoid in degrees. + crs.longitude_of_prime_meridian = _arg_default( + longitude_of_prime_meridian, 0 ) + crs._datum = datum + + return crs + class RotatedGeogCS(CoordSystem): """ @@ -878,32 +1061,39 @@ def __init__( false_northing=None, true_scale_lat=None, ellipsoid=None, + scale_factor_at_projection_origin=None, ): """ Constructs a Stereographic coord system. - Args: + Parameters + ---------- - * central_lat: + central_lat : float The latitude of the pole. - * central_lon: + central_lon : float The central longitude, which aligns with the y axis. - Kwargs: - - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . + false_easting : float, optional + X offset from planar origin in metres. - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . + false_northing : float, optional + Y offset from planar origin in metres. - * true_scale_lat: + true_scale_lat : float, optional Latitude of true scale. - * ellipsoid (:class:`GeogCS`): + scale_factor_at_projection_origin : float, optional + Scale factor at the origin of the projection + + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. + Notes + ----- + It is only valid to provide one of true_scale_lat and scale_factor_at_projection_origin + """ #: True latitude of planar origin in degrees. @@ -922,27 +1112,42 @@ def __init__( self.true_scale_lat = _arg_default( true_scale_lat, None, cast_as=_float_or_None ) - # N.B. the way we use this parameter, we need it to default to None, + #: Scale factor at projection origin. + self.scale_factor_at_projection_origin = _arg_default( + scale_factor_at_projection_origin, None, cast_as=_float_or_None + ) + # N.B. the way we use these parameters, we need them to default to None, # and *not* to 0.0 . + if ( + self.true_scale_lat is not None + and self.scale_factor_at_projection_origin is not None + ): + raise ValueError( + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and "true_scale_latitude". ' + ) + #: Ellipsoid definition (:class:`GeogCS` or None). 
self.ellipsoid = ellipsoid - def __repr__(self): - return ( - "Stereographic(central_lat={!r}, central_lon={!r}, " - "false_easting={!r}, false_northing={!r}, " - "true_scale_lat={!r}, " - "ellipsoid={!r})".format( - self.central_lat, - self.central_lon, - self.false_easting, - self.false_northing, - self.true_scale_lat, - self.ellipsoid, + def _repr_attributes(self): + if self.scale_factor_at_projection_origin is None: + scale_info = "true_scale_lat={!r}, ".format(self.true_scale_lat) + else: + scale_info = "scale_factor_at_projection_origin={!r}, ".format( + self.scale_factor_at_projection_origin ) + return ( + f"(central_lat={self.central_lat}, central_lon={self.central_lon}, " + f"false_easting={self.false_easting}, false_northing={self.false_northing}, " + f"{scale_info}" + f"ellipsoid={self.ellipsoid})" ) + def __repr__(self): + return "Stereographic" + self._repr_attributes() + def as_cartopy_crs(self): globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) @@ -952,6 +1157,7 @@ def as_cartopy_crs(self): self.false_easting, self.false_northing, self.true_scale_lat, + self.scale_factor_at_projection_origin, globe=globe, ) @@ -959,6 +1165,73 @@ def as_cartopy_projection(self): return self.as_cartopy_crs() +class PolarStereographic(Stereographic): + """ + A subclass of the stereographic map projection centred on a pole. + + """ + + grid_mapping_name = "polar_stereographic" + + def __init__( + self, + central_lat, + central_lon, + false_easting=None, + false_northing=None, + true_scale_lat=None, + scale_factor_at_projection_origin=None, + ellipsoid=None, + ): + """ + Construct a Polar Stereographic coord system. + + Parameters + ---------- + + central_lat : {90, -90} + The latitude of the pole. + + central_lon : float + The central longitude, which aligns with the y axis. + + false_easting : float, optional + X offset from planar origin in metres. + + false_northing : float, optional + Y offset from planar origin in metres. + + true_scale_lat : float, optional + Latitude of true scale. + + scale_factor_at_projection_origin : float, optional + Scale factor at the origin of the projection. + + ellipsoid : :class:`GeogCS`, optional + If given, defines the ellipsoid. + + Notes + ----- + It is only valid to provide at most one of `true_scale_lat` and + `scale_factor_at_projection_origin`. + + + """ + + super().__init__( + central_lat=central_lat, + central_lon=central_lon, + false_easting=false_easting, + false_northing=false_northing, + true_scale_lat=true_scale_lat, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ellipsoid=ellipsoid, + ) + + def __repr__(self): + return "PolarStereographic" + self._repr_attributes() + + class LambertConformal(CoordSystem): """ A coordinate system in the Lambert Conformal conic projection. @@ -1083,6 +1356,9 @@ def __init__( longitude_of_projection_origin=None, ellipsoid=None, standard_parallel=None, + scale_factor_at_projection_origin=None, + false_easting=None, + false_northing=None, ): """ Constructs a Mercator coord system. @@ -1098,6 +1374,22 @@ def __init__( * standard_parallel: The latitude where the scale is 1. Defaults to 0.0 . + * scale_factor_at_projection_origin: + Scale factor at natural origin. Defaults to unused. + + * false_easting: + X offset from the planar origin in metres. Defaults to 0.0. + + * false_northing: + Y offset from the planar origin in metres. Defaults to 0.0. + + * datum: + If given, specifies the datum of the coordinate system. Only + respected if iris.Future.datum_support is set.
+ + Note: Only one of ``standard_parallel`` and + ``scale_factor_at_projection_origin`` should be included. + """ #: True longitude of planar origin in degrees. self.longitude_of_projection_origin = _arg_default( @@ -1107,15 +1399,41 @@ def __init__( #: Ellipsoid definition (:class:`GeogCS` or None). self.ellipsoid = ellipsoid + # Initialise to None, then set based on arguments #: The latitude where the scale is 1. - self.standard_parallel = _arg_default(standard_parallel, 0) + self.standard_parallel = None + # The scale factor at the origin of the projection + self.scale_factor_at_projection_origin = None + if scale_factor_at_projection_origin is None: + self.standard_parallel = _arg_default(standard_parallel, 0) + else: + if standard_parallel is None: + self.scale_factor_at_projection_origin = _arg_default( + scale_factor_at_projection_origin, 0 + ) + else: + raise ValueError( + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and ' + '"standard_parallel".' + ) + + #: X offset from the planar origin in metres. + self.false_easting = _arg_default(false_easting, 0) + + #: Y offset from the planar origin in metres. + self.false_northing = _arg_default(false_northing, 0) def __repr__(self): res = ( "Mercator(longitude_of_projection_origin=" "{self.longitude_of_projection_origin!r}, " "ellipsoid={self.ellipsoid!r}, " - "standard_parallel={self.standard_parallel!r})" + "standard_parallel={self.standard_parallel!r}, " + "scale_factor_at_projection_origin=" + "{self.scale_factor_at_projection_origin!r}, " + "false_easting={self.false_easting!r}, " + "false_northing={self.false_northing!r})" ) return res.format(self=self) @@ -1126,6 +1444,9 @@ def as_cartopy_crs(self): central_longitude=self.longitude_of_projection_origin, globe=globe, latitude_true_scale=self.standard_parallel, + scale_factor=self.scale_factor_at_projection_origin, + false_easting=self.false_easting, + false_northing=self.false_northing, ) def as_cartopy_projection(self): diff --git a/lib/iris/coords.py b/lib/iris/coords.py index b236d407da..d0d471a634 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -12,6 +12,7 @@ from collections import namedtuple from collections.abc import Container, Iterator import copy +from functools import lru_cache from itertools import chain, zip_longest import operator import warnings @@ -1360,7 +1361,9 @@ def __eq__(self, other): else: return self.point == other elif isinstance(other, Cell): - return (self.point == other.point) and (self.bound == other.bound) + return (self.point == other.point) and ( + self.bound == other.bound or self.bound == other.bound[::-1] + ) elif ( isinstance(other, str) and self.bound is None @@ -2212,12 +2215,24 @@ def serialize(x): "Metadata may not be fully descriptive for {!r}." ) warnings.warn(msg.format(self.name())) - elif not self.is_contiguous(): - msg = ( - "Collapsing a non-contiguous coordinate. " - "Metadata may not be fully descriptive for {!r}." - ) - warnings.warn(msg.format(self.name())) + else: + try: + self._sanity_check_bounds() + except ValueError as exc: + msg = ( + "Cannot check if coordinate is contiguous: {} " + "Metadata may not be fully descriptive for {!r}. " + "Ignoring bounds." + ) + warnings.warn(msg.format(str(exc), self.name())) + self.bounds = None + else: + if not self.is_contiguous(): + msg = ( + "Collapsing a non-contiguous coordinate. " + "Metadata may not be fully descriptive for {!r}." 
+ ) + warnings.warn(msg.format(self.name())) if self.has_bounds(): item = self.core_bounds() @@ -2440,7 +2455,9 @@ def nearest_neighbour_index(self, point): if self.has_bounds(): # make bounds ranges complete+separate, so point is in at least one increasing = self.bounds[0, 1] > self.bounds[0, 0] - bounds = bounds.copy() + # identify data type that bounds and point can safely cast to + dtype = np.result_type(bounds, point) + bounds = bounds.astype(dtype) # sort the bounds cells by their centre values sort_inds = np.argsort(np.mean(bounds, axis=1)) bounds = bounds[sort_inds] @@ -2519,6 +2536,10 @@ def _xml_id_extra(self, unique_value): return unique_value +_regular_points = lru_cache(iris.util.regular_points) +"""Caching version of iris.util.regular_points""" + + class DimCoord(Coord): """ A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not @@ -2566,12 +2587,9 @@ def from_regular( bounds values will be defined. Defaults to False. """ - points = (zeroth + step) + step * np.arange(count, dtype=np.float32) - _, regular = iris.util.points_step(points) - if not regular: - points = (zeroth + step) + step * np.arange( - count, dtype=np.float64 - ) + # Use lru_cache because this is done repeatedly with the same arguments + # (particularly in field-based file loading). + points = _regular_points(zeroth, step, count).copy() points.flags.writeable = False if with_bounds: @@ -2803,6 +2821,10 @@ def _new_bounds_requirements(self, bounds): * bounds are not masked, and * bounds are monotonic in the first dimension. + Also reverse the order of the second dimension if necessary to match the + first dimension's direction. I.e. both should increase or both should + decrease. + """ # Ensure the bounds are a compatible shape. if self.shape != bounds.shape[:-1] and not ( @@ -2852,6 +2874,16 @@ def _new_bounds_requirements(self, bounds): emsg.format(self.name(), self.__class__.__name__) ) + if n_bounds == 2: + # Make ordering of bounds consistent with coord's direction + # if possible. + (direction,) = directions + diffs = bounds[:, 0] - bounds[:, 1] + if np.all(np.sign(diffs) == direction): + bounds = np.flip(bounds, axis=1) + + return bounds + @Coord.bounds.setter def bounds(self, bounds): if bounds is not None: @@ -2860,8 +2892,9 @@ def bounds(self, bounds): # Make sure we have an array (any type of array). bounds = np.asanyarray(bounds) - # Check validity requirements for dimension-coordinate bounds. - self._new_bounds_requirements(bounds) + # Check validity requirements for dimension-coordinate bounds and reverse + # trailing dimension if necessary. + bounds = self._new_bounds_requirements(bounds) # Cast to a numpy array for masked arrays with no mask. bounds = np.array(bounds) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index b456bd9663..8879ade621 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -152,19 +152,13 @@ class CubeList(list): """ - def __new__(cls, list_of_cubes=None): - """Given a :class:`list` of cubes, return a CubeList instance.""" - cube_list = list.__new__(cls, list_of_cubes) - - # Check that all items in the incoming list are cubes. Note that this - # checking does not guarantee that a CubeList instance *always* has - # just cubes in its list as the append & __getitem__ methods have not - # been overridden. - if not all([isinstance(cube, Cube) for cube in cube_list]): - raise ValueError( - "All items in list_of_cubes must be Cube " "instances." 
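# Sketch of the caching applied above: wrap a pure point-generating function
# in functools.lru_cache and copy the result before use, so repeated loads
# share one computation but callers can never mutate the cached array.
# `regular_points` here stands in for iris.util.regular_points.
from functools import lru_cache
import numpy as np

@lru_cache
def regular_points(zeroth, step, count):
    return (zeroth + step) + step * np.arange(count, dtype=np.float64)

pts = regular_points(0.0, 2.5, 4).copy()  # copy: the cached array is shared
pts[0] = -999.0                           # safe; the cache is untouched
print(regular_points(0.0, 2.5, 4))        # [ 2.5  5.   7.5 10. ]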
- ) - return cube_list + def __init__(self, *args, **kwargs): + """Given an iterable of cubes, return a CubeList instance.""" + # Do whatever a list does, to initialise ourselves "as a list" + super().__init__(*args, **kwargs) + # Check that all items in the list are cubes. + for cube in self: + self._assert_is_cube(cube) def __str__(self): """Runs short :meth:`Cube.summary` on every cube.""" @@ -182,13 +176,17 @@ def __repr__(self): """Runs repr on every cube.""" return "[%s]" % ",\n".join([repr(cube) for cube in self]) - def _repr_html_(self): - from iris.experimental.representation import CubeListRepresentation - - representer = CubeListRepresentation(self) - return representer.repr_html() + @staticmethod + def _assert_is_cube(obj): + if not hasattr(obj, "add_aux_coord"): + msg = ( + f"Object {obj} cannot be put in a cubelist, " + "as it is not a Cube." + ) + raise ValueError(msg) # TODO #370 Which operators need overloads? + def __add__(self, other): return CubeList(list.__add__(self, other)) @@ -210,6 +208,48 @@ def __getslice__(self, start, stop): result = CubeList(result) return result + def __iadd__(self, other_cubes): + """ + Add a sequence of cubes to the cubelist in place. + """ + return super(CubeList, self).__iadd__(CubeList(other_cubes)) + + def __setitem__(self, key, cube_or_sequence): + """Set self[key] to cube or sequence of cubes.""" + if isinstance(key, int): + # should have single cube. + self._assert_is_cube(cube_or_sequence) + else: + # key is a slice (or exception will come from list method). + cube_or_sequence = CubeList(cube_or_sequence) + + super(CubeList, self).__setitem__(key, cube_or_sequence) + + def append(self, cube): + """ + Append a cube. + """ + self._assert_is_cube(cube) + super(CubeList, self).append(cube) + + def extend(self, other_cubes): + """ + Extend cubelist by appending the cubes contained in other_cubes. + + Args: + + * other_cubes: + A cubelist or other sequence of cubes. + """ + super(CubeList, self).extend(CubeList(other_cubes)) + + def insert(self, index, cube): + """ + Insert a cube before index. + """ + self._assert_is_cube(cube) + super(CubeList, self).insert(index, cube) + def xml(self, checksum=False, order=True, byteorder=True): """Return a string of the XML that this list of cubes represents.""" @@ -972,6 +1012,30 @@ def _names(self): """ return self._metadata_manager._names + def _dimensional_metadata(self, name_or_dimensional_metadata): + """ + Return a single _DimensionalMetadata instance that matches the given + name_or_dimensional_metadata. If one is not found, raise an error. + + """ + found_item = None + for cube_method in [ + self.coord, + self.cell_measure, + self.ancillary_variable, + ]: + try: + found_item = cube_method(name_or_dimensional_metadata) + if found_item: + break + except KeyError: + pass + if not found_item: + raise KeyError( + f"{name_or_dimensional_metadata} was not found in {self}." + ) + return found_item + def is_compatible(self, other, ignore=None): """ Return whether the cube is compatible with another. @@ -1188,7 +1252,16 @@ def add_aux_factory(self, aux_factory): "Factory must be a subclass of " "iris.aux_factory.AuxCoordFactory." ) - cube_coords = self.coords() + + # Get all 'real' coords (i.e. not derived ones) : use private data + # rather than cube.coords(), as that is quite slow.
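# Toy sketch of the validating-container pattern CubeList uses above: every
# mutating entry point funnels through one check so non-conforming items
# can never get in. Not the Iris implementation itself.
class IntList(list):
    @staticmethod
    def _check(item):
        if not isinstance(item, int):
            raise ValueError(f"{item!r} is not an int.")

    def __init__(self, iterable=()):
        super().__init__(iterable)
        for item in self:
            self._check(item)

    def append(self, item):
        self._check(item)
        super().append(item)

    def extend(self, items):
        super().extend(IntList(items))

nums = IntList([1, 2])
nums.append(3)
nums.extend([4, 5])
print(nums)  # [1, 2, 3, 4, 5]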
+ def coordsonly(coords_and_dims): + return [coord for coord, dims in coords_and_dims] + + cube_coords = coordsonly(self._dim_coords_and_dims) + coordsonly( + self._aux_coords_and_dims + ) + for dependency in aux_factory.dependencies: ref_coord = aux_factory.dependencies[dependency] if ref_coord is not None and ref_coord not in cube_coords: @@ -3713,6 +3786,10 @@ def collapsed(self, coords, aggregator, **kwargs): for coord in coords: dims_to_collapse.update(self.coord_dims(coord)) + if aggregator.name() == "max_run" and len(dims_to_collapse) > 1: + msg = "Not possible to calculate runs over more than one dimension" + raise ValueError(msg) + if not dims_to_collapse: msg = ( "Cannot collapse a dimension which does not describe any " @@ -3818,6 +3895,7 @@ def collapsed(self, coords, aggregator, **kwargs): data_result = aggregator.aggregate( unrolled_data, axis=-1, **kwargs ) + aggregator.update_metadata( collapsed_cube, coords, axis=collapse_axis, **kwargs ) @@ -3826,36 +3904,53 @@ def collapsed(self, coords, aggregator, **kwargs): ) return result - def aggregated_by(self, coords, aggregator, **kwargs): + def aggregated_by( + self, coords, aggregator, climatological=False, **kwargs + ): """ - Perform aggregation over the cube given one or more "group - coordinates". + Perform aggregation over the cube given one or more "group coordinates". A "group coordinate" is a coordinate where repeating values represent a - single group, such as a month coordinate on a daily time slice. - Repeated values will form a group even if they are not consecutive. + single group, such as a month coordinate on a daily time slice. Repeated + values will form a group even if they are not consecutive. The group coordinates must all be over the same cube dimension. Each common value group identified over all the group-by coordinates is collapsed using the provided aggregator. - Args: + Weighted aggregations (:class:`iris.analysis.WeightedAggregator`) may + also be supplied. These include :data:`~iris.analysis.MEAN` and + :data:`~iris.analysis.SUM`. + + Weighted aggregations support an optional *weights* keyword argument. If + set, this should be supplied as an array of weights whose shape matches + the cube or as 1D array whose length matches the dimension over which is + aggregated. - * coords (list of coord names or :class:`iris.coords.Coord` instances): + Parameters + ---------- + coords : (list of coord names or :class:`iris.coords.Coord` instances) One or more coordinates over which group aggregation is to be performed. - * aggregator (:class:`iris.analysis.Aggregator`): + aggregator : :class:`iris.analysis.Aggregator` Aggregator to be applied to each group. - - Kwargs: - - * kwargs: + climatological : bool + Indicates whether the output is expected to be climatological. For + any aggregated time coord(s), this causes the climatological flag to + be set and the point for each cell to equal its first bound, thereby + preserving the time of year. + + Returns + ------- + :class:`iris.cube.Cube` + + Other Parameters + ---------------- + kwargs: Aggregator and aggregation function keyword arguments. - Returns: - :class:`iris.cube.Cube`. 
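The new `collapsed` guard refuses runs over more than one dimension; assuming the aggregator concerned is the one exposed as `iris.analysis.MAX_RUN` and `cube` is any 2D cube, the failure mode would look like::

    import iris.analysis

    # A run counts consecutive occurrences along a single axis, so a
    # two-dimensional collapse has no well-defined meaning:
    cube.collapsed(["latitude", "longitude"], iris.analysis.MAX_RUN)
    # ValueError: Not possible to calculate runs over more than one dimension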
- - For example: + Examples + -------- >>> import iris >>> import iris.analysis @@ -3892,14 +3987,6 @@ def aggregated_by(self, coords, aggregator, **kwargs): groupby_coords = [] dimension_to_groupby = None - # We can't handle weights - if isinstance( - aggregator, iris.analysis.WeightedAggregator - ) and aggregator.uses_weighting(**kwargs): - raise ValueError( - "Invalid Aggregation, aggregated_by() cannot use" " weights." - ) - coords = self._as_list_of_coords(coords) for coord in sorted(coords, key=lambda coord: coord.metadata): if coord.ndim > 1: @@ -3922,6 +4009,31 @@ def aggregated_by(self, coords, aggregator, **kwargs): raise iris.exceptions.CoordinateCollapseError(msg) groupby_coords.append(coord) + # Check shape of weights. These must either match the shape of the cube + # or be 1D (in this case, their length must be equal to the length of the + # dimension we are aggregating over). + weights = kwargs.get("weights") + return_weights = kwargs.get("returned", False) + if weights is not None: + if weights.ndim == 1: + if len(weights) != self.shape[dimension_to_groupby]: + raise ValueError( + f"1D weights must have the same length as the dimension " + f"that is aggregated, got {len(weights):d}, expected " + f"{self.shape[dimension_to_groupby]:d}" + ) + weights = iris.util.broadcast_to_shape( + weights, + self.shape, + (dimension_to_groupby,), + ) + if weights.shape != self.shape: + raise ValueError( + f"Weights must either be 1D or have the same shape as the " + f"cube, got shape {weights.shape} for weights, " + f"{self.shape} for cube" + ) + # Determine the other coordinates that share the same group-by # coordinate dimension. shared_coords = list( @@ -3941,7 +4053,9 @@ def aggregated_by(self, coords, aggregator, **kwargs): # Create the aggregation group-by instance. groupby = iris.analysis._Groupby( - groupby_coords, shared_coords_and_dims + groupby_coords, + shared_coords_and_dims, + climatological=climatological, ) # Create the resulting aggregate-by cube and remove the original @@ -3967,16 +4081,41 @@ def aggregated_by(self, coords, aggregator, **kwargs): back_slice = (slice(None, None),) * ( len(data_shape) - dimension_to_groupby - 1 ) + + # Create cube and weights slices groupby_subcubes = map( lambda groupby_slice: self[ front_slice + (groupby_slice,) + back_slice ].lazy_data(), groupby.group(), ) - agg = partial( + if weights is not None: + groupby_subweights = map( + lambda groupby_slice: weights[ + front_slice + (groupby_slice,) + back_slice + ], + groupby.group(), + ) + else: + groupby_subweights = (None for _ in range(len(groupby))) + + agg = iris.analysis.create_weighted_aggregator_fn( aggregator.lazy_aggregate, axis=dimension_to_groupby, **kwargs ) - result = list(map(agg, groupby_subcubes)) + result = list(map(agg, groupby_subcubes, groupby_subweights)) + + # If weights are returned, "result" is a list of tuples (each tuple + # contains two elements; the first is the aggregated data, the + # second is the aggregated weights). Convert these to two lists + # (one for the aggregated data and one for the aggregated weights) + # before combining the different slices. + if return_weights: + result, weights_result = list(zip(*result)) + aggregateby_weights = da.stack( + weights_result, axis=dimension_to_groupby + ) + else: + aggregateby_weights = None aggregateby_data = da.stack(result, axis=dimension_to_groupby) else: cube_slice = [slice(None, None)] * len(data_shape) @@ -3985,13 +4124,23 @@ def aggregated_by(self, coords, aggregator, **kwargs): # sub-cube. 
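The shape check here promotes 1D weights to the full cube shape before any slicing happens; a plain-numpy sketch of that promotion (the real helper is `iris.util.broadcast_to_shape`)::

    import numpy as np

    def expand_weights(weights, shape, dim):
        """Sketch: broadcast 1D weights along `dim` to the data shape."""
        weights = np.asarray(weights)
        if weights.ndim == 1:
            if len(weights) != shape[dim]:
                raise ValueError(
                    "1D weights must match the aggregated dimension"
                )
            index = [np.newaxis] * len(shape)
            index[dim] = slice(None)
            weights = np.broadcast_to(weights[tuple(index)], shape)
        if weights.shape != shape:
            raise ValueError("weights must be 1D or match the cube shape")
        return weights

    expand_weights([1.0, 2.0, 3.0], (3, 4), dim=0).shape   # -> (3, 4)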
cube_slice[dimension_to_groupby] = groupby_slice groupby_sub_cube = self[tuple(cube_slice)] + + # Slice the weights + if weights is not None: + groupby_sub_weights = weights[tuple(cube_slice)] + kwargs["weights"] = groupby_sub_weights + # Perform the aggregation over the group-by sub-cube and - # repatriate the aggregated data into the aggregate-by - # cube data. - cube_slice[dimension_to_groupby] = i + # repatriate the aggregated data into the aggregate-by cube + # data. If weights are also returned, handle them separately. result = aggregator.aggregate( groupby_sub_cube.data, axis=dimension_to_groupby, **kwargs ) + if return_weights: + weights_result = result[1] + result = result[0] + else: + weights_result = None # Determine aggregation result data type for the aggregate-by # cube data on first pass. @@ -4004,7 +4153,20 @@ def aggregated_by(self, coords, aggregator, **kwargs): aggregateby_data = np.zeros( data_shape, dtype=result.dtype ) + if weights_result is not None: + aggregateby_weights = np.zeros( + data_shape, dtype=weights_result.dtype + ) + else: + aggregateby_weights = None + cube_slice[dimension_to_groupby] = i aggregateby_data[tuple(cube_slice)] = result + if weights_result is not None: + aggregateby_weights[tuple(cube_slice)] = weights_result + + # Restore original weights. + if weights is not None: + kwargs["weights"] = weights # Add the aggregation meta data to the aggregate-by cube. aggregator.update_metadata( @@ -4015,22 +4177,36 @@ def aggregated_by(self, coords, aggregator, **kwargs): dimensions=dimension_to_groupby, dim_coords=True ) or [None] for coord in groupby.coords: + new_coord = coord.copy() + + # The metadata may have changed (e.g. climatology), so check if + # there's a better coord to pass to self.coord_dims + lookup_coord = coord + for ( + cube_coord, + groupby_coord, + ) in groupby.coord_replacement_mapping: + if coord == groupby_coord: + lookup_coord = cube_coord + if ( dim_coord is not None - and dim_coord.metadata == coord.metadata + and dim_coord.metadata == lookup_coord.metadata and isinstance(coord, iris.coords.DimCoord) ): - aggregateby_cube.add_dim_coord( - coord.copy(), dimension_to_groupby - ) + aggregateby_cube.add_dim_coord(new_coord, dimension_to_groupby) else: aggregateby_cube.add_aux_coord( - coord.copy(), self.coord_dims(coord) + new_coord, self.coord_dims(lookup_coord) ) # Attach the aggregate-by data into the aggregate-by cube. + if aggregateby_weights is None: + data_result = aggregateby_data + else: + data_result = (aggregateby_data, aggregateby_weights) aggregateby_cube = aggregator.post_process( - aggregateby_cube, aggregateby_data, coords, **kwargs + aggregateby_cube, data_result, coords, **kwargs ) return aggregateby_cube @@ -4247,6 +4423,8 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): interpolate. The values for coordinates that correspond to dates or times may optionally be supplied as datetime.datetime or cftime.datetime instances. + The N pairs supplied will be used to create an N-d grid of points + that will then be sampled (rather than just N points). * scheme: An instance of the type of interpolation to use to interpolate from this :class:`~iris.cube.Cube` to the given sample points. 
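Putting the `aggregated_by` pieces together: weighted group aggregation, optionally returning the aggregated weights. A hedged usage sketch, where `cube` (with a "month" coordinate) and `area_weights` are assumed, and the two-element return mirrors the tuple-splitting code above::

    import iris.analysis

    # Weighted monthly means; returned=True also hands back the summed
    # weights for each group, per the zip(*result) handling above.
    result, group_weights = cube.aggregated_by(
        "month",
        iris.analysis.MEAN,
        weights=area_weights,
        returned=True,
    )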
The @@ -4277,7 +4455,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (time: 3; model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(cube.coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] shape: (3,) dtype: float64 @@ -4290,7 +4468,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result.coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [2009-11-19 10:30:00] shape: (1,) dtype: float64 @@ -4305,7 +4483,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result2.coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [2009-11-19 10:30:00] shape: (1,) dtype: float64 diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 48e11e1fb0..116b340592 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -85,28 +85,32 @@ def __init__(self, cube): self.cube_id = id(self.cube) self.cube_str = escape(str(self.cube)) - self.str_headings = { - "Dimension coordinates:": None, - "Auxiliary coordinates:": None, - "Mesh coordinates:": None, - "Derived coordinates:": None, - "Cell measures:": None, - "Ancillary variables:": None, - "Scalar coordinates:": None, - "Scalar cell measures:": None, - "Cell methods:": None, - "Attributes:": None, - } - self.dim_desc_coords = [ + # Define the expected vector and scalar sections in output, in expected + # order of appearance. + # NOTE: if we recoded this to use a CubeSummary, these section titles + # would be available from that. + self.vector_section_names = [ "Dimension coordinates:", - "Auxiliary coordinates:", "Mesh coordinates:", + "Auxiliary coordinates:", "Derived coordinates:", "Cell measures:", "Ancillary variables:", ] - - self.two_cell_headers = ["Scalar coordinates:", "Attributes:"] + self.scalar_section_names = [ + "Mesh:", + "Scalar coordinates:", + "Scalar cell measures:", + "Cell methods:", + "Attributes:", + ] + self.sections_data = { + name: None + for name in self.vector_section_names + self.scalar_section_names + } + # 'Scalar-cell-measures' is currently alone amongst the scalar sections, + # in displaying only a 'name' and no 'value' field. + self.single_cell_section_names = ["Scalar cell measures:"] # Important content that summarises a cube is defined here. self.shapes = self.cube.shape @@ -160,7 +164,7 @@ def _get_bits(self, bits): # Get heading indices within the printout. 
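The representation changes key content off a fixed set of section titles; a self-contained sketch of that heading-to-content split, mirroring what `_get_bits` does with `bits.index` (names here are illustrative)::

    def split_sections(lines, headings):
        """Map each heading found in `lines` to the lines that follow
        it, up to the next heading."""
        starts = sorted(
            (lines.index(h), h) for h in headings if h in lines
        )
        sections = {}
        for (i0, name), (i1, _) in zip(
            starts, starts[1:] + [(len(lines), None)]
        ):
            sections[name] = lines[i0 + 1 : i1]
        return sections

    split_sections(
        ["Dimension coordinates:", "  time", "Attributes:", "  source"],
        ["Dimension coordinates:", "Attributes:"],
    )
    # {'Dimension coordinates:': ['  time'], 'Attributes:': ['  source']}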
start_inds = [] - for hdg in self.str_headings.keys(): + for hdg in self.sections_data.keys(): heading = "{}{}".format(left_indent, hdg) try: start_ind = bits.index(heading) @@ -178,7 +182,7 @@ def _get_bits(self, bits): content = bits[i0 + 1 : i1] else: content = bits[i0 + 1 :] - self.str_headings[str_heading_name] = content + self.sections_data[str_heading_name] = content def _make_header(self): """ @@ -272,47 +276,29 @@ def _make_row(self, title, body=None, col_span=0): row.append("") return row - def _expand_last_cell(self, element, body): - """Expand an element containing a cell by adding a new line.""" - split_point = element.index("") - element = element[:split_point] + "
" + body + element[split_point:] - return element - def _make_content(self): elements = [] - for k, v in self.str_headings.items(): + for k, v in self.sections_data.items(): if v is not None: # Add the sub-heading title. elements.extend(self._make_row(k)) for line in v: # Add every other row in the sub-heading. - if k in self.dim_desc_coords: + if k in self.vector_section_names: body = re.findall(r"[\w-]+", line) title = body.pop(0) colspan = 0 - elif k in self.two_cell_headers: - try: - split_point = line.index(":") - except ValueError: - # When a line exists in v without a ':', we expect - # that this is due to the value of some attribute - # containing multiple lines. We collect all these - # lines in the same cell. - body = line.strip() - # We choose the element containing the last cell - # in the last row. - element = elements[-2] - element = self._expand_last_cell(element, body) - elements[-2] = element - continue + else: + colspan = self.ndims + if k in self.single_cell_section_names: + title = line.strip() + body = "" else: + line = line.strip() + split_point = line.index(" ") title = line[:split_point].strip() body = line[split_point + 2 :].strip() - colspan = self.ndims - else: - title = line.strip() - body = "" - colspan = self.ndims + elements.extend( self._make_row(title, body=body, col_span=colspan) ) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d286abbf3d..09237d3f11 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -18,7 +18,7 @@ 3) Iris-specific info is (still) stored in additional properties created on the engine object : - engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + engine.cf_var, .cube, .cube_parts, .requires, .rules_triggered, .filename Our "rules" are just action routines. The top-level 'run_actions' routine decides which actions to call, based on the @@ -78,7 +78,7 @@ def inner(engine, *args, **kwargs): # but also may vary depending on whether it successfully # triggered, and if so what it matched. 
rule_name = _default_rulenamesfunc(func.__name__) - engine.rule_triggered.add(rule_name) + engine.rules_triggered.add(rule_name) func._rulenames_func = _default_rulenamesfunc return inner @@ -110,9 +110,13 @@ def action_default(engine): hh.build_transverse_mercator_coordinate_system, ), hh.CF_GRID_MAPPING_STEREO: ( - hh.has_supported_stereographic_parameters, + None, hh.build_stereographic_coordinate_system, ), + hh.CF_GRID_MAPPING_POLAR: ( + hh.has_supported_polar_stereographic_parameters, + hh.build_polar_stereographic_coordinate_system, + ), hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ( None, hh.build_lambert_conformal_coordinate_system, diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index a5b507d583..d50d3f324a 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -19,7 +19,9 @@ import cf_units import numpy as np import numpy.ma as ma +import pyproj +import iris import iris.aux_factory from iris.common.mixin import _get_valid_standard_name import iris.coord_systems @@ -131,6 +133,8 @@ CF_ATTR_BOUNDS = "bounds" CF_ATTR_CALENDAR = "calendar" CF_ATTR_CLIMATOLOGY = "climatology" +CF_ATTR_GRID_CRS_WKT = "crs_wkt" +CF_ATTR_GRID_DATUM = "horizontal_datum_name" CF_ATTR_GRID_INVERSE_FLATTENING = "inverse_flattening" CF_ATTR_GRID_EARTH_RADIUS = "earth_radius" CF_ATTR_GRID_MAPPING_NAME = "grid_mapping_name" @@ -141,6 +145,7 @@ CF_ATTR_GRID_SEMI_MINOR_AXIS = "semi_minor_axis" CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = "latitude_of_projection_origin" CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = "longitude_of_projection_origin" +CF_ATTR_GRID_STRAIGHT_VERT_LON = "straight_vertical_longitude_from_pole" CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" CF_ATTR_GRID_FALSE_EASTING = "false_easting" CF_ATTR_GRID_FALSE_NORTHING = "false_northing" @@ -233,7 +238,10 @@ def build_cube_metadata(engine): ################################################################################ def _get_ellipsoid(cf_grid_var): - """Return the ellipsoid definition.""" + """ + Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of + `cf_grid_var`. Returns None if no relevant properties are specified. + """ major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) inverse_flattening = getattr( @@ -248,21 +256,51 @@ def _get_ellipsoid(cf_grid_var): if major is None and minor is None and inverse_flattening is None: major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) - return major, minor, inverse_flattening + datum = getattr(cf_grid_var, CF_ATTR_GRID_DATUM, None) + # Check crs_wkt if no datum + if datum is None: + crs_wkt = getattr(cf_grid_var, CF_ATTR_GRID_CRS_WKT, None) + if crs_wkt is not None: + proj_crs = pyproj.crs.CRS.from_wkt(crs_wkt) + if proj_crs.datum is not None: + datum = proj_crs.datum.name + + # An unknown crs datum will be treated as None + if datum == "unknown": + datum = None + + if not iris.FUTURE.datum_support: + wmsg = ( + "Ignoring a datum in netCDF load for consistency with existing " + "behaviour. In a future version of Iris, this datum will be " + "applied. To apply the datum when loading, use the " + "iris.FUTURE.datum_support flag." 
+ ) + warnings.warn(wmsg, FutureWarning, stacklevel=14) + datum = None + + if datum is not None: + return iris.coord_systems.GeogCS.from_datum(datum) + elif major is None and minor is None and inverse_flattening is None: + return None + else: + return iris.coord_systems.GeogCS(major, minor, inverse_flattening) ################################################################################ def build_coordinate_system(engine, cf_grid_var): """Create a coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - return iris.coord_systems.GeogCS(major, minor, inverse_flattening) + coord_system = _get_ellipsoid(cf_grid_var) + if coord_system is None: + raise ValueError("No ellipsoid specified") + else: + return coord_system ################################################################################ def build_rotated_coordinate_system(engine, cf_grid_var): """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) north_pole_latitude = getattr( cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0 @@ -277,14 +315,6 @@ def build_rotated_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - rcs = iris.coord_systems.RotatedGeogCS( north_pole_latitude, north_pole_longitude, @@ -302,7 +332,7 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -327,14 +357,6 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.TransverseMercator( latitude_of_projection_origin, longitude_of_central_meridian, @@ -354,7 +376,7 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -368,14 +390,6 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.LambertConformal( latitude_of_projection_origin, longitude_of_central_meridian, @@ -395,7 +409,7 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): grid mapping variable. 
""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -403,18 +417,12 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - # Iris currently only supports Stereographic projections with a scale - # factor of 1.0. This is checked elsewhere. - - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) cs = iris.coord_systems.Stereographic( latitude_of_projection_origin, @@ -422,6 +430,43 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): false_easting, false_northing, true_scale_lat=None, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ellipsoid=ellipsoid, + ) + + return cs + + +################################################################################ +def build_polar_stereographic_coordinate_system(engine, cf_grid_var): + """ + Create a polar stereographic coordinate system from the CF-netCDF + grid mapping variable. + + """ + ellipsoid = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_STRAIGHT_VERT_LON, None + ) + true_scale_lat = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + cs = iris.coord_systems.PolarStereographic( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + true_scale_lat, + scale_factor_at_projection_origin, ellipsoid=ellipsoid, ) @@ -435,26 +480,27 @@ def build_mercator_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) - # Iris currently only supports Mercator projections with specific - # values for false_easting, false_northing, - # scale_factor_at_projection_origin and standard_parallel. These are - # checked elsewhere. 
- - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) cs = iris.coord_systems.Mercator( - longitude_of_projection_origin, ellipsoid=ellipsoid + longitude_of_projection_origin, + ellipsoid=ellipsoid, + standard_parallel=standard_parallel, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + false_easting=false_easting, + false_northing=false_northing, ) return cs @@ -467,7 +513,7 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -478,14 +524,6 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.LambertAzimuthalEqualArea( latitude_of_projection_origin, longitude_of_projection_origin, @@ -504,7 +542,7 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -518,14 +556,6 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.AlbersEqualArea( latitude_of_projection_origin, longitude_of_central_meridian, @@ -545,7 +575,7 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -559,14 +589,6 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.VerticalPerspective( latitude_of_projection_origin, longitude_of_projection_origin, @@ -586,7 +608,7 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): grid mapping variable. 
""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -603,14 +625,6 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.Geostationary( latitude_of_projection_origin, longitude_of_projection_origin, @@ -1244,40 +1258,20 @@ def has_supported_mercator_parameters(engine, cf_name): is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] - false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None - ) standard_parallel = getattr( cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None ) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) - if false_easting is not None and false_easting != 0: - warnings.warn( - "False eastings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False - if false_northing is not None and false_northing != 0: - warnings.warn( - "False northings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False if ( scale_factor_at_projection_origin is not None - and scale_factor_at_projection_origin != 1 + and standard_parallel is not None ): warnings.warn( - "Scale factors other than 1.0 not yet supported for " - "Mercator projections" - ) - is_valid = False - if standard_parallel is not None and standard_parallel != 0: - warnings.warn( - "Standard parallels other than 0.0 not yet " - "supported for Mercator projections" + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and "standard_parallel".' 
) is_valid = False @@ -1285,24 +1279,45 @@ def has_supported_mercator_parameters(engine, cf_name): ################################################################################ -def has_supported_stereographic_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has a value of 1.0 - for the scale_factor_at_projection_origin attribute.""" +def has_supported_polar_stereographic_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has the supported + values for the parameters of the Polar Stereographic projection.""" is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) scale_factor_at_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) + if ( + latitude_of_projection_origin != 90 + and latitude_of_projection_origin != -90 + ): + warnings.warn('"latitude_of_projection_origin" must be +90 or -90.') + is_valid = False + if ( scale_factor_at_projection_origin is not None - and scale_factor_at_projection_origin != 1 + and standard_parallel is not None ): warnings.warn( - "Scale factors other than 1.0 not yet supported for " - "stereographic projections" + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and "standard_parallel".' + ) + is_valid = False + + if scale_factor_at_projection_origin is None and standard_parallel is None: + warnings.warn( + 'One of "scale_factor_at_projection_origin" and ' + '"standard_parallel" is required.' ) is_valid = False diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index b22fbd3b51..a3a23dc323 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1044,6 +1044,7 @@ class CFReader: CFGroup = CFGroup def __init__(self, filename, warn=False, monotonic=False): + self._dataset = None self._filename = os.path.expanduser(filename) #: Collection of CF-netCDF variables associated with this netCDF file @@ -1295,7 +1296,8 @@ def _reset(self): def __del__(self): # Explicitly close dataset to prevent file remaining open. - self._dataset.close() + if self._dataset is not None: + self._dataset.close() def _getncattr(dataset, attr, default=None): diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 2fb628bebf..50c02e4d04 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -59,14 +59,18 @@ def _dot_path(): def save(cube, target): - """Save a dot representation of the cube. - - Args: + """ + Save a dot representation of the cube. - * cube - A :class:`iris.cube.Cube`. - * target - A filename or open file handle. + Args + ---- + cube: :class:`iris.cube.Cube`. + target + A filename or open file handle. - See also :func:`iris.io.save`. + See Also + -------- + :func:`iris.io.save`. """ if isinstance(target, str): @@ -87,19 +91,23 @@ def save(cube, target): def save_png(source, target, launch=False): """ - Produces a "dot" instance diagram by calling dot and optionally launching the resulting image. - - Args: + Produce a "dot" instance diagram by calling dot and optionally launching + the resulting image. - * source - A :class:`iris.cube.Cube`, or dot filename. - * target - A filename or open file handle. - If passing a file handle, take care to open it for binary output. + Args + ---- + source: :class:`iris.cube.Cube`, or dot filename. 
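The one-line cf.py fix is worth generalising: initialise the attribute before anything that can raise, so `__del__` on a half-constructed object stays safe. A sketch, with the built-in `open` standing in for the real netCDF dataset::

    class Reader:
        def __init__(self, filename):
            self._dataset = None                  # set first: __del__ is now safe
            self._dataset = open(filename, "rb")  # the open may raise

        def __del__(self):
            # Explicitly close to prevent the file remaining open.
            if self._dataset is not None:
                self._dataset.close()

    # Reader("no-such-file") raises in __init__, but garbage collection of
    # the partial object no longer raises a second error from __del__.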
+ target + A filename or open file handle. + If passing a file handle, take care to open it for binary output. - Kwargs: + **kwargs + * launch + Display the image. Default is False. - * launch - Display the image. Default is False. - - See also :func:`iris.io.save`. + See Also + -------- + :func:`iris.io.save`. """ # From cube or dot file? @@ -152,11 +160,13 @@ def save_png(source, target, launch=False): def cube_text(cube): - """Return a DOT text representation a `iris.cube.Cube`. - - Args: + """ + Return a DOT text representation a `iris.cube.Cube`. - * cube - The cube for which to create DOT text. + Args + ---- + cube + The cube for which to create DOT text. """ # We use r'' type string constructor as when we type \n in a string without the r'' constructor @@ -283,13 +293,14 @@ def cube_text(cube): def _coord_text(label, coord): """ - Returns a string containing the dot representation for a single coordinate node. - - Args: + Return a string containing the dot representation for a single coordinate + node. - * label + Args + ---- + label The dot ID of the coordinate node. - * coord + coord The coordinate to convert. """ @@ -315,14 +326,16 @@ def _coord_text(label, coord): def _coord_system_text(cs, uid): """ - Returns a string containing the dot representation for a single coordinate system node. + Return a string containing the dot representation for a single coordinate + system node. - Args: - - * cs + Args + ---- + cs The coordinate system to convert. - * uid - The uid allows/distinguishes non-identical CoordSystems of the same type. + uid + The uid allows/distinguishes non-identical CoordSystems of the same + type. """ attrs = [] @@ -341,15 +354,15 @@ def _coord_system_text(cs, uid): def _dot_node(indent, id, name, attributes): """ - Returns a string containing the dot representation for a single node. - - Args: + Return a string containing the dot representation for a single node. - * id + Args + ---- + id The ID of the node. - * name + name The visual name of the node. - * attributes + attributes An iterable of (name, value) attribute pairs. """ diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 34e88aff80..3aaba3679e 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -456,7 +456,7 @@ def _generate_cubes( # Define the time unit and use it to serialise the datetime for # the time coordinate. time_unit = cf_units.Unit( - "hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN + "hours since epoch", calendar=cf_units.CALENDAR_STANDARD ) # Build time, height, latitude and longitude coordinates. @@ -1212,7 +1212,7 @@ def load_NAMEIII_trajectory(filename): """ time_unit = cf_units.Unit( - "hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN + "hours since epoch", calendar=cf_units.CALENDAR_STANDARD ) with open(filename, "r") as infile: diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 100ab29daa..6a7b37a1cc 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -19,6 +19,7 @@ import os.path import re import string +from typing import List import warnings import cf_units @@ -185,13 +186,14 @@ _CM_INTERVAL = "interval" _CM_METHOD = "method" _CM_NAME = "name" +_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+") _CM_PARSE = re.compile( r""" (?P([\w_]+\s*?:\s+)+) (?P[\w_\s]+(?![\w_]*\s*?:))\s* (?: \(\s* - (?P[^\)]+) + (?P.+) \)\s* )? 
""", @@ -203,6 +205,69 @@ class UnknownCellMethodWarning(Warning): pass +def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: + """ + Split a CF cell_methods attribute string into a list of zero or more cell + methods, each of which is then parsed with a regex to return a list of match + objects. + + Args: + + * nc_cell_methods: The value of the cell methods attribute to be split. + + Returns: + + * nc_cell_methods_matches: A list of the re.Match objects associated with + each parsed cell method + + Splitting is done based on words followed by colons outside of any brackets. + Validation of anything other than being laid out in the expected format is + left to the calling function. + """ + + # Find name candidates + name_start_inds = [] + for m in _CM_PARSE_NAME.finditer(nc_cell_methods): + name_start_inds.append(m.start()) + + # Remove those that fall inside brackets + bracket_depth = 0 + for ind, cha in enumerate(nc_cell_methods): + if cha == "(": + bracket_depth += 1 + elif cha == ")": + bracket_depth -= 1 + if bracket_depth < 0: + msg = ( + "Cell methods may be incorrectly parsed due to mismatched " + "brackets" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + if bracket_depth > 0 and ind in name_start_inds: + name_start_inds.remove(ind) + + # List tuples of indices of starts and ends of the cell methods in the string + method_indices = [] + for ii in range(len(name_start_inds) - 1): + method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) + method_indices.append((name_start_inds[-1], len(nc_cell_methods))) + + # Index the string and match against each substring + nc_cell_methods_matches = [] + for start_ind, end_ind in method_indices: + nc_cell_method_str = nc_cell_methods[start_ind:end_ind] + nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) + if not nc_cell_method_match: + msg = ( + f"Failed to fully parse cell method string: {nc_cell_methods}" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + continue + nc_cell_methods_matches.append(nc_cell_method_match) + + return nc_cell_methods_matches + + def parse_cell_methods(nc_cell_methods): """ Parse a CF cell_methods attribute string into a tuple of zero or @@ -226,7 +291,7 @@ def parse_cell_methods(nc_cell_methods): cell_methods = [] if nc_cell_methods is not None: - for m in _CM_PARSE.finditer(nc_cell_methods): + for m in _split_cell_methods(nc_cell_methods): d = m.groupdict() method = d[_CM_METHOD] method = method.strip() @@ -498,7 +563,7 @@ def _actions_activation_stats(engine, cf_name): print("Rules Triggered:") - for rule in sorted(list(engine.rule_triggered)): + for rule in sorted(list(engine.rules_triggered)): print("\t%s" % rule) print("Case Specific Facts:") @@ -570,13 +635,21 @@ def _get_cf_var_data(cf_var, filename): return as_lazy_data(proxy, chunks=chunks) -class OrderedAddableList(list): - # Used purely in actions debugging, to accumulate a record of which actions - # were activated. - # It replaces a set, so as to record the ordering of operations, with - # possible repeats, and it also numbers the entries. - # Actions routines invoke the 'add' method, which thus effectively converts - # a set.add into a list.append. +class _OrderedAddableList(list): + """ + A custom container object for actions recording. + + Used purely in actions debugging, to accumulate a record of which actions + were activated. + + It replaces a set, so as to preserve the ordering of operations, with + possible repeats, and it also numbers the entries. 
+ + The actions routines invoke an 'add' method, so this effectively replaces + a set.add with a list.append. + + """ + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._n_add = 0 @@ -602,7 +675,7 @@ def _load_cube(engine, cf, cf_var, filename): engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = OrderedAddableList() + engine.rules_triggered = _OrderedAddableList() engine.filename = filename # Assert all the case-specific facts. @@ -825,12 +898,12 @@ def inner(cf_datavar): def load_cubes(filenames, callback=None, constraints=None): """ - Loads cubes from a list of NetCDF filenames/URLs. + Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. Args: * filenames (string/list): - One or more NetCDF filenames/DAP URLs to load from. + One or more NetCDF filenames/OPeNDAP URLs to load from. Kwargs: @@ -1368,6 +1441,8 @@ def _create_cf_dimensions( unlimited_dim_names.append(dim_name) for dim_name in dimension_names: + # NOTE: these dim-names have been chosen by _get_dim_names, and + # were already checked+fixed to avoid any name collisions. if dim_name not in self._dataset.dimensions: if dim_name in unlimited_dim_names: size = None @@ -1460,6 +1535,10 @@ def _add_mesh(self, cube_or_mesh): last_dim = f"{cf_mesh_name}_{loc_from}_N_{loc_to}s" # Create if it does not already exist. if last_dim not in self._dataset.dimensions: + while last_dim in self._dataset.variables: + # Also avoid collision with variable names. + # See '_get_dim_names' for reason. + last_dim = self._increment_name(last_dim) length = conn.shape[1 - conn.location_axis] self._dataset.createDimension(last_dim, length) @@ -1861,8 +1940,19 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): assert dim_name is not None # Ensure it is a valid variable name. dim_name = self.cf_valid_var_name(dim_name) - # Disambiguate if it matches an existing one. - while dim_name in self._existing_dim: + # Disambiguate if it has the same name as an existing + # dimension. + # NOTE: *OR* if it matches the name of an existing file + # variable. Because there is a bug ... + # See https://github.com/Unidata/netcdf-c/issues/1772 + # N.B. the workarounds here *ONLY* function because the + # caller (write) will not create any more variables + # in between choosing dim names (here), and creating + # the new dims (via '_create_cf_dimensions'). + while ( + dim_name in self._existing_dim + or dim_name in self._dataset.variables + ): dim_name = self._increment_name(dim_name) # Record the new dimension. @@ -1907,9 +1997,15 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): dim_name = self._get_coord_variable_name( cube, coord ) + # Disambiguate if it has the same name as an + # existing dimension. + # OR if it matches an existing file variable name. + # NOTE: check against variable names is needed + # because of a netcdf bug ... see note in the + # mesh dimensions block above. while ( dim_name in self._existing_dim - or dim_name in self._name_coord_map.names + or dim_name in self._dataset.variables ): dim_name = self._increment_name(dim_name) @@ -1917,16 +2013,18 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): # No CF-netCDF coordinates describe this data dimension. # Make up a new, distinct dimension name dim_name = f"dim{dim}" - if dim_name in self._existing_dim: - # Increment name if conflicted with one already existing. 
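The repeated while-loops in the saver all implement one rule: a new dimension name must avoid the existing dimensions AND the existing variables, working around https://github.com/Unidata/netcdf-c/issues/1772. A toy sketch, with a simple numeric-suffix incrementer assumed in place of the saver's `_increment_name`::

    def unique_name(candidate, *reserved_namespaces):
        """Sketch: bump a numeric suffix until nothing collides."""
        taken = set().union(*reserved_namespaces)
        name, index = candidate, 0
        while name in taken:
            name = f"{candidate}_{index}"
            index += 1
        return name

    existing_dims = {"time"}
    existing_vars = {"time_0"}
    unique_name("time", existing_dims, existing_vars)   # -> 'time_1'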
- if self._existing_dim[dim_name] != cube.shape[dim]: - while ( - dim_name in self._existing_dim - and self._existing_dim[dim_name] - != cube.shape[dim] - or dim_name in self._name_coord_map.names - ): - dim_name = self._increment_name(dim_name) + # Increment name if conflicted with one already existing + # (or planned) + # NOTE: check against variable names is needed because + # of a netcdf bug ... see note in the mesh dimensions + # block above. + while ( + dim_name in self._existing_dim + and ( + self._existing_dim[dim_name] != cube.shape[dim] + ) + ) or dim_name in self._dataset.variables: + dim_name = self._increment_name(dim_name) # Record the dimension. record_dimension( @@ -2057,6 +2155,12 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): if bounds_dimension_name not in self._dataset.dimensions: # Create the bounds dimension with the appropriate extent. + while bounds_dimension_name in self._dataset.variables: + # Also avoid collision with variable names. + # See '_get_dim_names' for reason. + bounds_dimension_name = self._increment_name( + bounds_dimension_name + ) self._dataset.createDimension(bounds_dimension_name, n_bounds) boundsvar_name = "{}_{}".format(cf_name, varname_extra) @@ -2337,6 +2441,12 @@ def _create_generic_cf_array_var( # Determine whether to create the string length dimension. if string_dimension_name not in self._dataset.dimensions: + while string_dimension_name in self._dataset.variables: + # Also avoid collision with variable names. + # See '_get_dim_names' for reason. + string_dimension_name = self._increment_name( + string_dimension_name + ) self._dataset.createDimension( string_dimension_name, string_dimension_depth ) @@ -2511,6 +2621,8 @@ def add_ellipsoid(ellipsoid): else: cf_var_grid.semi_major_axis = semi_major cf_var_grid.semi_minor_axis = semi_minor + if ellipsoid.datum is not None: + cf_var_grid.horizontal_datum_name = ellipsoid.datum # latlon if isinstance(cs, iris.coord_systems.GeogCS): @@ -2553,11 +2665,15 @@ def add_ellipsoid(ellipsoid): cf_var_grid.longitude_of_projection_origin = ( cs.longitude_of_projection_origin ) - # The Mercator class has implicit defaults for certain - # parameters - cf_var_grid.false_easting = 0.0 - cf_var_grid.false_northing = 0.0 - cf_var_grid.scale_factor_at_projection_origin = 1.0 + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + # Only one of these should be set + if cs.standard_parallel is not None: + cf_var_grid.standard_parallel = cs.standard_parallel + elif cs.scale_factor_at_projection_origin is not None: + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin + ) # lcc elif isinstance(cs, iris.coord_systems.LambertConformal): @@ -2569,27 +2685,46 @@ def add_ellipsoid(ellipsoid): cf_var_grid.false_easting = cs.false_easting cf_var_grid.false_northing = cs.false_northing - # stereo - elif isinstance(cs, iris.coord_systems.Stereographic): + # polar stereo (have to do this before Stereographic because it subclasses it) + elif isinstance(cs, iris.coord_systems.PolarStereographic): + if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.latitude_of_projection_origin = cs.central_lat + cf_var_grid.straight_vertical_longitude_from_pole = ( + cs.central_lon + ) + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + # Only one of these should be set if cs.true_scale_lat is not None: - warnings.warn( - "Stereographic coordinate systems with " - "true scale latitude specified are not 
" - "yet handled" + cf_var_grid.true_scale_lat = cs.true_scale_lat + elif cs.scale_factor_at_projection_origin is not None: + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin ) else: - if cs.ellipsoid: - add_ellipsoid(cs.ellipsoid) - cf_var_grid.longitude_of_projection_origin = ( - cs.central_lon + cf_var_grid.scale_factor_at_projection_origin = 1.0 + + # stereo + elif isinstance(cs, iris.coord_systems.Stereographic): + if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.longitude_of_projection_origin = cs.central_lon + cf_var_grid.latitude_of_projection_origin = cs.central_lat + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + # Only one of these should be set + if cs.true_scale_lat is not None: + msg = ( + "It is not valid CF to save a true_scale_lat for " + "a Stereographic grid mapping." ) - cf_var_grid.latitude_of_projection_origin = ( - cs.central_lat + raise ValueError(msg) + elif cs.scale_factor_at_projection_origin is not None: + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin ) - cf_var_grid.false_easting = cs.false_easting - cf_var_grid.false_northing = cs.false_northing - # The Stereographic class has an implicit scale - # factor + else: cf_var_grid.scale_factor_at_projection_origin = 1.0 # osgb (a specific tmerc) @@ -2738,9 +2873,9 @@ def _create_cf_data_variable( cmin, cmax = _co_realise_lazy_arrays([cmin, cmax]) n = dtype.itemsize * 8 if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) + scale_factor = (cmax - cmin) / (2**n - 2) else: - scale_factor = (cmax - cmin) / (2 ** n - 1) + scale_factor = (cmax - cmin) / (2**n - 1) if dtype.kind == "u": add_offset = cmin elif dtype.kind == "i": @@ -2865,6 +3000,14 @@ def _increment_name(self, varname): @staticmethod def _lazy_stream_data(data, fill_value, fill_warn, cf_var): + if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: + # (Don't do this check for string data). + # Reduce dimensionality where the data array has an extra dimension + # versus the cf_var - to avoid a broadcasting ambiguity. + # Happens when bounds data is for a scalar point - array is 2D but + # contains just 1 row, so the cf_var is 1D. + data = data.squeeze(axis=0) + if is_lazy_data(data): def store(data, cf_var, fill_value): diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index b0e0f6e864..fd1ccb0e95 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -24,7 +24,7 @@ NIMROD_DEFAULT = -32767.0 TIME_UNIT = cf_units.Unit( - "seconds since 1970-01-01 00:00:00", calendar=cf_units.CALENDAR_GREGORIAN + "seconds since 1970-01-01 00:00:00", calendar=cf_units.CALENDAR_STANDARD ) @@ -33,7 +33,7 @@ class TranslationWarning(Warning): def is_missing(field, value): - """Returns True if value matches an "is-missing" number.""" + """Return True if value matches an "is-missing" number.""" return any( np.isclose(value, [field.int_mdi, field.float32_mdi, NIMROD_DEFAULT]) ) @@ -86,7 +86,8 @@ def name(cube, field, handle_metadata_errors): def remove_unprintable_chars(input_str): """ - Removes unprintable characters from a string and returns the result. + Remove unprintable characters from a string and return the result. 
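On the `2**n` hunks (a pure formatting move to the un-spaced power operator), the surrounding packing arithmetic is worth spelling out; a sketch of the scale-factor step, assuming n-bit integer packing where a masked dataset reserves one integer for the fill value::

    import numpy as np

    data = np.array([0.0, 2.5, 10.0])
    dtype = np.dtype("int16")
    masked = True

    cmin, cmax = float(data.min()), float(data.max())
    n = dtype.itemsize * 8
    # One of the 2**n representable integers is reserved for the fill
    # value when masked, leaving one fewer quantisation step:
    levels = (2**n - 2) if masked else (2**n - 1)
    scale_factor = (cmax - cmin) / levels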
+ """ return "".join( c if c in string.printable else " " for c in input_str @@ -278,7 +279,7 @@ def forecast_period(cube): def mask_cube(cube, field): """ - Updates cube.data to be a masked array if appropriate. + Update cube.data to be a masked array if appropriate. """ dtype = cube.dtype @@ -307,16 +308,17 @@ def experiment(cube, field): def proj_biaxial_ellipsoid(field, handle_metadata_errors): """ - Returns the correct dictionary of arguments needed to define an + Return the correct dictionary of arguments needed to define an iris.coord_systems.GeogCS. Based firstly on the value given by ellipsoid, then by grid if ellipsoid is missing, select the right pre-defined ellipsoid dictionary (Airy_1830 or international_1924). - References: - Airy 1830: https://georepository.com/ellipsoid_7001/Airy-1830.html - International 1924: https://georepository.com/ellipsoid_7022/International-1924.html + References + ---------- + Airy 1830: https://georepository.com/ellipsoid_7001/Airy-1830.html + International 1924: https://georepository.com/ellipsoid_7022/International-1924.html """ airy_1830 = { @@ -357,10 +359,12 @@ def proj_biaxial_ellipsoid(field, handle_metadata_errors): def set_british_national_grid_defaults(field, handle_metadata_errors): - """Check for missing coord-system meta-data and set default values for + """ + Check for missing coord-system meta-data and set default values for the Ordnance Survey GB Transverse Mercator projection. Some Radarnet - files are missing these.""" + files are missing these. + """ if handle_metadata_errors: if is_missing(field, field.true_origin_latitude): field.true_origin_latitude = 49.0 @@ -472,8 +476,12 @@ def horizontal_grid(cube, field, handle_metadata_errors): def vertical_coord(cube, field): - """Add a vertical coord to the cube, with bounds, if appropriate. - Handles special numbers for "at-sea-level" (8888) and "at-ground-level" (9999).""" + """ + Add a vertical coord to the cube, with bounds, if appropriate. + Handles special numbers for "at-sea-level" (8888) and "at-ground-level" + (9999). + + """ # vertical_codes contains conversions from the Nimrod Documentation for the # header entry 20 for the vertical coordinate type # Unhandled vertical_codes values (no use case identified): @@ -670,7 +678,10 @@ def add_attr(item): def known_threshold_coord(field): """ Supplies known threshold coord meta-data for known use cases. - threshold_value_alt exists because some meta-data are mis-assigned in the Nimrod data. + + threshold_value_alt exists because some meta-data are mis-assigned in the + Nimrod data. + """ coord_keys = {} if ( @@ -715,9 +726,11 @@ def known_threshold_coord(field): def probability_coord(cube, field, handle_metadata_errors): """ - Adds a coord relating to probability meta-data from the header to the + Add a coord relating to probability meta-data from the header to the cube if appropriate. + Must be run after the name method. + """ probtype_lookup = { 1: { @@ -848,7 +861,7 @@ def probability_coord(cube, field, handle_metadata_errors): def soil_type_coord(cube, field): - """Add soil type as a coord if appropriate""" + """Add soil type as a coord if appropriate.""" soil_type_codes = { 1: "broadleaf_tree", 2: "needleleaf_tree", @@ -905,17 +918,18 @@ def run(field, handle_metadata_errors=True): """ Convert a NIMROD field to an Iris cube. 
- Args: - - * field - a :class:`~iris.fileformats.nimrod.NimrodField` - - * handle_metadata_errors - Set to False to omit handling of known meta-data deficiencies - in Nimrod-format data - - Returns: + Args + ---- + field: :class:`~iris.fileformats.nimrod.NimrodField` - * A new :class:`~iris.cube.Cube`, created from the NimrodField. + handle_metadata_errors + Set to False to omit handling of known meta-data deficiencies + in Nimrod-format data + Returns + ------- + :class:`~iris.cube.Cube` + A new :class:`~iris.cube.Cube`, created from the NimrodField. """ cube = iris.cube.Cube(field.data) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 9f213ec4db..1fb7d4e178 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -253,14 +253,13 @@ class STASH(collections.namedtuple("STASH", "model section item")): def __new__(cls, model, section, item): """ - - Args: - - * model + Args + ---- + model A positive integer less than 100, or None. - * section + section A non-negative integer less than 100, or None. - * item + item A positive integer less than 1000, or None. """ @@ -350,7 +349,9 @@ class SplittableInt: >>> print(three_six_two[2]) 3 - .. note:: No support for negative numbers + Notes + ----- + No support for negative numbers """ @@ -358,11 +359,12 @@ def __init__(self, value, name_mapping_dict=None): """ Build a SplittableInt given the positive integer value provided. - Kwargs: - - * name_mapping_dict - (dict) - A special mapping to provide name based access to specific integer - positions: + Args + ---- + **kwargs + * name_mapping_dict - (dict) + A special mapping to provide name based access to specific + integer positions: >>> a = SplittableInt(1234, {'hundreds': 2}) >>> print(a.hundreds) @@ -373,6 +375,7 @@ def __init__(self, value, name_mapping_dict=None): >>> print(a) 1934 + """ if value < 0: raise ValueError( @@ -403,7 +406,7 @@ def _calculate_str_value_from_value(self): def _calculate_value_from_str_value(self): self._value = np.sum( - [10 ** i * val for i, val in enumerate(self._strvalue)] + [10**i * val for i, val in enumerate(self._strvalue)] ) def __len__(self): @@ -418,7 +421,7 @@ def __getitem__(self, key): # if the key returns a list of values, then combine them together # to an integer if isinstance(val, list): - val = sum([10 ** i * val for i, val in enumerate(val)]) + val = sum([10**i * val for i, val in enumerate(val)]) return val @@ -789,7 +792,7 @@ def _data_bytes_to_shaped_array( def _header_defn(release_number): """ - Returns the zero-indexed header definition for a particular release of + Return the zero-indexed header definition for a particular release of a PPField. """ @@ -803,7 +806,7 @@ def _header_defn(release_number): def _pp_attribute_names(header_defn): """ - Returns the allowed attributes of a PPField: + Return the allowed attributes of a PPField: all of the normal headers (i.e. not the _SPECIAL_HEADERS), the _SPECIAL_HEADERS with '_' prefixed, the possible extra data headers. @@ -860,7 +863,7 @@ def __init__(self, header=None): def __getattr__(self, key): """ - This method supports deferred attribute creation, which offers a + Method supports deferred attribute creation, which offers a significant loading optimisation, particularly when not all attributes are referenced and therefore created on the instance. 
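The SplittableInt round-trip touched above is plain positional-notation arithmetic; the reconstruction step in isolation, with digits stored least-significant first as in `_calculate_value_from_str_value`::

    digits = [4, 3, 2, 1]
    value = sum(10**i * d for i, d in enumerate(digits))
    # 4*1 + 3*10 + 2*100 + 1*1000 == 1234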
@@ -922,7 +925,6 @@ def t2(self):
 
     def __repr__(self):
        """Return a string representation of the PP field."""
-
        # Define an ordering on the basic header names
        attribute_priority_lookup = {
            name: loc[0] for name, loc in self.HEADER_DEFN
@@ -960,7 +962,7 @@ def __repr__(self):
     @property
     def stash(self):
         """
-        A stash property giving access to the associated STASH object,
+        Stash property giving access to the associated STASH object,
         now supporting __eq__
 
         """
@@ -1044,7 +1046,7 @@ def lbproc(self, value):
     @property
     def data(self):
         """
-        The :class:`numpy.ndarray` representing the multidimensional data
+        :class:`numpy.ndarray` representing the multidimensional data
         of the pp file
 
         """
@@ -1064,7 +1066,7 @@ def core_data(self):
     def calendar(self):
         """Return the calendar of the field."""
         # TODO #577 What calendar to return when ibtim.ic in [0, 3]
-        calendar = cf_units.CALENDAR_GREGORIAN
+        calendar = cf_units.CALENDAR_STANDARD
         if self.lbtim.ic == 2:
             calendar = cf_units.CALENDAR_360_DAY
         elif self.lbtim.ic == 4:
@@ -1075,7 +1077,6 @@ def _read_extra_data(
         self, pp_file, file_reader, extra_len, little_ended=False
     ):
         """Read the extra data section and update the self appropriately."""
-
         dtype_endian_char = "<" if little_ended else ">"
         # While there is still extra data to decode run this loop
         while extra_len > 0:
@@ -1124,10 +1125,8 @@ def y_bounds(self):
 
     def save(self, file_handle):
         """
-        Save the PPField to the given file object
-        (typically created with :func:`open`).
-
-        ::
+        Save the PPField to the given file object
+        (typically created with :func:`open`)::
 
             # to append the field to a file
             with open(filename, 'ab') as fh:
@@ -1137,15 +1136,13 @@ def save(self, file_handle):
             with open(filename, 'wb') as fh:
                 a_pp_field.save(fh)
 
-
-        .. note::
-
-            The fields which are automatically calculated are: 'lbext',
-            'lblrec' and 'lbuser[0]'. Some fields are not currently
-            populated, these are: 'lbegin', 'lbnrec', 'lbuser[1]'.
+        Notes
+        -----
+        The fields which are automatically calculated are: 'lbext',
+        'lblrec' and 'lbuser[0]'. Some fields are not currently
+        populated, these are: 'lbegin', 'lbnrec', 'lbuser[1]'.
 
         """
-        # Get the actual data content.
         data = self.data
         mdi = self.bmdi
@@ -1361,9 +1358,9 @@ def time_unit(self, time_unit, epoch="epoch"):
     def coord_system(self):
         """Return a CoordSystem for this PPField.
 
-        Returns:
-            Currently, a :class:`~iris.coord_systems.GeogCS` or
-            :class:`~iris.coord_systems.RotatedGeogCS`.
+        Returns
+        -------
+        :class:`~iris.coord_systems.GeogCS` or :class:`~iris.coord_systems.RotatedGeogCS`.
 
         """
         geog_cs = iris.coord_systems.GeogCS(EARTH_RADIUS)
@@ -1408,9 +1405,11 @@ def _y_coord_name(self):
 
     def copy(self):
         """
-        Returns a deep copy of this PPField.
+        Return a deep copy of this PPField.
 
-        Returns:
+        Returns
+        -------
+        :class:`PPField`
             A copy instance of the :class:`PPField`.
 
         """
@@ -1470,7 +1469,7 @@ class PPField2(PPField):
     @property
     def t1(self):
         """
-        A cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr,
+        cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr,
         and lbmin attributes.
 
         """
@@ -1504,7 +1503,7 @@ def t1(self, dt):
     @property
     def t2(self):
         """
-        A cftime.datetime object consisting of the lbyrd, lbmond, lbdatd,
+        cftime.datetime object consisting of the lbyrd, lbmond, lbdatd,
         lbhrd, and lbmind attributes.
""" @@ -1551,7 +1550,7 @@ class PPField3(PPField): @property def t1(self): """ - A cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, lbmin, and lbsec attributes. """ @@ -1586,7 +1585,7 @@ def t1(self, dt): @property def t2(self): """ - A cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes. """ @@ -1638,20 +1637,20 @@ def load(filename, read_data=False, little_ended=False): """ Return an iterator of PPFields given a filename. - Args: - - * filename - string of the filename to load. - - Kwargs: - - * read_data - boolean - Flag whether or not the data should be read, if False an empty - data manager will be provided which can subsequently load the data - on demand. Default False. - - * little_ended - boolean - If True, file contains all little-ended words (header and data). - + Args + ---- + filename + string of the filename to load. + **kwargs + * read_data - boolean + Flag whether or not the data should be read, if False an empty + data manager will be provided which can subsequently load the data + on demand. Default False. + * little_ended - boolean + If True, file contains all little-ended words (header and data). + + Notes + ----- To iterate through all of the fields in a pp file:: for field in iris.fileformats.pp.load(filename): @@ -1737,7 +1736,7 @@ def _interpret_fields(fields): def _create_field_data(field, data_shape, land_mask_field=None): """ - Modifies a field's ``_data`` attribute either by: + Modify a field's ``_data`` attribute either by: * converting a 'deferred array bytes' tuple into a lazy array, * converting LoadedArrayBytes into an actual numpy array. @@ -1834,7 +1833,7 @@ def calc_array(mask, values): def _field_gen(filename, read_data_bytes, little_ended=False): """ - Returns a generator of "half-formed" PPField instances derived from + Return a generator of "half-formed" PPField instances derived from the given filename. A field returned by the generator is only "half-formed" because its @@ -1966,7 +1965,7 @@ def _field_gen(filename, read_data_bytes, little_ended=False): def _convert_constraints(constraints): """ - Converts known constraints from Iris semantics to PP semantics + Convert known constraints from Iris semantics to PP semantics ignoring all unknown constraints. """ @@ -1976,8 +1975,9 @@ def _convert_constraints(constraints): def _make_func(stashobj): """ - Provides unique name-space for each lambda function's stashobj + Provide unique name-space for each lambda function's stashobj variable. + """ return lambda stash: stash == stashobj @@ -2010,7 +2010,7 @@ def _make_func(stashobj): def pp_filter(field): """ - return True if field is to be kept, + Return True if field is to be kept, False if field does not match filter """ @@ -2035,24 +2035,23 @@ def pp_filter(field): def load_cubes(filenames, callback=None, constraints=None): """ - Loads cubes from a list of pp filenames. - - Args: - - * filenames - list of pp filenames to load - - Kwargs: - - * constraints - a list of Iris constraints - - * callback - a function which can be passed on to - :func:`iris.io.run_callback` - - .. note:: - - The resultant cubes may not be in the order that they are in the file - (order is not preserved when there is a field with orography - references) + Load cubes from a list of pp filenames. 
+ + Args + ---- + filenames + list of pp filenames to load + **kwargs + * constraints + a list of Iris constraints + * callback + a function which can be passed on to :func:`iris.io.run_callback` + + Notes + ----- + The resultant cubes may not be in the order that they are in the file + (order is not preserved when there is a field with orography + references) """ return _load_cubes_variable_loader( @@ -2062,24 +2061,23 @@ def load_cubes(filenames, callback=None, constraints=None): def load_cubes_little_endian(filenames, callback=None, constraints=None): """ - Loads cubes from a list of pp filenames containing little-endian data. - - Args: - - * filenames - list of pp filenames to load - - Kwargs: - - * constraints - a list of Iris constraints - - * callback - a function which can be passed on to - :func:`iris.io.run_callback` - - .. note:: - - The resultant cubes may not be in the order that they are in the file - (order is not preserved when there is a field with orography - references) + Load cubes from a list of pp filenames containing little-endian data. + + Args + ---- + filenames + list of pp filenames to load + **kwargs + * constraints + a list of Iris constraints + * callback + a function which can be passed on to :func:`iris.io.run_callback` + + Notes + ----- + The resultant cubes may not be in the order that they are in the file + (order is not preserved when there is a field with orography + references) """ return _load_cubes_variable_loader( @@ -2096,14 +2094,18 @@ def load_pairs_from_fields(pp_fields): Convert an iterable of PP fields into an iterable of tuples of (Cubes, PPField). - Args: - - * pp_fields: + Args + ---- + pp_fields: An iterable of :class:`iris.fileformats.pp.PPField`. - Returns: + Returns + ------- + :class:`iris.cube.Cube` An iterable of :class:`iris.cube.Cube`\ s. + Notes + ----- This capability can be used to filter out fields before they are passed to the load pipeline, and amend the cubes once they are created, using PP metadata conditions. Where this filtering @@ -2192,26 +2194,30 @@ def save(cube, target, append=False, field_coords=None): """ Use the PP saving rules (and any user rules) to save a cube to a PP file. - Args: + Args + ---- + cube: :class:`iris.cube.Cube` - * cube - A :class:`iris.cube.Cube` - * target - A filename or open file handle. - - Kwargs: - - * append - Whether to start a new file afresh or add the cube(s) - to the end of the file. - Only applicable when target is a filename, not a file - handle. - Default is False. - - * field_coords - list of 2 coords or coord names which are to be used - for reducing the given cube into 2d slices, - which will ultimately determine the x and y - coordinates of the resulting fields. - If None, the final two dimensions are chosen - for slicing. + target + A filename or open file handle. + **kwargs + * append + Whether to start a new file afresh or add the cube(s) + to the end of the file. + Only applicable when target is a filename, not a file + handle. + Default is False. + * field_coords + list of 2 coords or coord names which are to be used + for reducing the given cube into 2d slices, + which will ultimately determine the x and y + coordinates of the resulting fields. + If None, the final two dimensions are chosen + for slicing. + + Notes + ----- See also :func:`iris.io.save`. Note that :func:`iris.save` is the preferred method of saving. This allows a :class:`iris.cube.CubeList` or a sequence of cubes to be saved to a PP file. 
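The filter-then-load workflow that the `load_pairs_from_fields` docstring above describes looks roughly like this in practice. The filename and STASH code below are placeholders, so treat this as a sketch rather than a recipe:

```python
import iris.fileformats.pp as pp

# Keep only the fields whose STASH code matches, then pair each
# resulting cube with the raw field it came from.
fields = [
    field
    for field in pp.load("my_file.pp")
    if str(field.stash) == "m01s16i203"
]
for cube, field in pp.load_pairs_from_fields(fields):
    print(cube.name(), field.lbproc)
```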
@@ -2226,21 +2232,18 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): Use the PP saving rules to convert a cube or iterable of cubes to an iterable of (2D cube, PP field) pairs. - Args: - - * cube: + Args + ---- + cube: A :class:`iris.cube.Cube` - - Kwargs: - - * field_coords: - List of 2 coords or coord names which are to be used for - reducing the given cube into 2d slices, which will ultimately - determine the x and y coordinates of the resulting fields. - If None, the final two dimensions are chosen for slicing. - - * target: - A filename or open file handle. + **kwargs + * field_coords: + List of 2 coords or coord names which are to be used for + reducing the given cube into 2d slices, which will ultimately + determine the x and y coordinates of the resulting fields. + If None, the final two dimensions are chosen for slicing. + * target: + A filename or open file handle. """ # Open issues @@ -2348,21 +2351,18 @@ def as_fields(cube, field_coords=None, target=None): Use the PP saving rules (and any user rules) to convert a cube to an iterable of PP fields. - Args: - - * cube: + Args + ---- + cube A :class:`iris.cube.Cube` - - Kwargs: - - * field_coords: - List of 2 coords or coord names which are to be used for - reducing the given cube into 2d slices, which will ultimately - determine the x and y coordinates of the resulting fields. - If None, the final two dimensions are chosen for slicing. - - * target: - A filename or open file handle. + **kwargs + * field_coords: + List of 2 coords or coord names which are to be used for + reducing the given cube into 2d slices, which will ultimately + determine the x and y coordinates of the resulting fields. + If None, the final two dimensions are chosen for slicing. + * target: + A filename or open file handle. """ return ( @@ -2377,22 +2377,22 @@ def save_fields(fields, target, append=False): """ Save an iterable of PP fields to a PP file. - Args: - - * fields: + Args + ---- + fields: An iterable of PP fields. - * target: + target: A filename or open file handle. - - Kwargs: - - * append: - Whether to start a new file afresh or add the cube(s) to the end - of the file. - Only applicable when target is a filename, not a file handle. - Default is False. - - See also :func:`iris.io.save`. + **kwargs + * append: + Whether to start a new file afresh or add the cube(s) to the end + of the file. + Only applicable when target is a filename, not a file handle. + Default is False. + + See Also + -------- + :func:`iris.io.save`. """ # Open issues diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 82f40dbf14..ebccec47ee 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -9,6 +9,7 @@ # SciTools/iris-code-generators:tools/gen_rules.py import calendar +from functools import wraps import cf_units import numpy as np @@ -514,7 +515,7 @@ def _new_coord_and_dims( _HOURS_UNIT = cf_units.Unit("hours") -def _epoch_date_hours(epoch_hours_unit, datetime): +def _epoch_date_hours_internals(epoch_hours_unit, datetime): """ Return an 'hours since epoch' number for a date. @@ -547,7 +548,7 @@ def _epoch_date_hours(epoch_hours_unit, datetime): if m == 0: # Add a 'January', by changing month=0 to 1. 
m = 1 - if calendar == cf_units.CALENDAR_GREGORIAN: + if calendar == cf_units.CALENDAR_STANDARD: days_offset += 31 elif calendar == cf_units.CALENDAR_360_DAY: days_offset += 30 @@ -560,7 +561,7 @@ def _epoch_date_hours(epoch_hours_unit, datetime): if y == 0: # Add a 'Year 0', by changing year=0 to 1. y = 1 - if calendar == cf_units.CALENDAR_GREGORIAN: + if calendar == cf_units.CALENDAR_STANDARD: days_in_year_0 = 366 elif calendar == cf_units.CALENDAR_360_DAY: days_in_year_0 = 360 @@ -589,6 +590,30 @@ def _epoch_date_hours(epoch_hours_unit, datetime): return epoch_hours +_epoch_date_hours_cache = {} +_epoch_date_hours_cache_max_size = 128 # lru_cache default + + +@wraps(_epoch_date_hours_internals) +def _epoch_date_hours(epoch_hours_unit, datetime): + # Not using functools.lru_cache because it does an equality check that fails + # on datetime objects from different calendars. + + key = (epoch_hours_unit, hash(datetime)) + + if key not in _epoch_date_hours_cache: + _epoch_date_hours_cache[key] = _epoch_date_hours_internals( + epoch_hours_unit, datetime + ) + + # Limit cache size + while len(_epoch_date_hours_cache) > _epoch_date_hours_cache_max_size: + oldest_item = next(iter(_epoch_date_hours_cache)) + _epoch_date_hours_cache.pop(oldest_item, None) + + return _epoch_date_hours_cache[key] + + def _convert_time_coords( lbcode, lbtim, diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index ed156b5a05..e6b3748f9b 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -398,7 +398,7 @@ def _calendar_rules(cube, pp): if time_coord is not None: if time_coord.units.calendar == "360_day": pp.lbtim.ic = 2 - elif time_coord.units.calendar == "gregorian": + elif time_coord.units.calendar == "standard": pp.lbtim.ic = 1 elif time_coord.units.calendar == "365_day": pp.lbtim.ic = 4 diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 034fa4baab..4659f70ae3 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -131,20 +131,26 @@ def decode_uri(uri, default="file"): return scheme, part -def expand_filespecs(file_specs): +def expand_filespecs(file_specs, files_expected=True): """ Find all matching file paths from a list of file-specs. - Args: - - * file_specs (iterable of string): - File paths which may contain '~' elements or wildcards. - - Returns: - A well-ordered list of matching absolute file paths. - If any of the file-specs match no existing files, an - exception is raised. - + Parameters + ---------- + file_specs : iterable of str + File paths which may contain ``~`` elements or wildcards. + files_expected : bool, default=True + Whether file is expected to exist (i.e. for load). + + Returns + ------- + list of str + if files_expected is ``True``: + A well-ordered list of matching absolute file paths. + If any of the file-specs match no existing files, an + exception is raised. + if files_expected is ``False``: + A list of expanded file paths. 
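+
+    For example, with ``files_expected=False`` no globbing is attempted and
+    the specs are simply expanded to absolute paths; a sketch, with purely
+    illustrative paths::
+
+        expand_filespecs(["~/new_file.nc"], files_expected=False)
+        # -> ['/home/user/new_file.nc']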
""" # Remove any hostname component - currently unused filenames = [ @@ -154,26 +160,30 @@ def expand_filespecs(file_specs): for fn in file_specs ] - # Try to expand all filenames as globs - glob_expanded = OrderedDict( - [[fn, sorted(glob.glob(fn))] for fn in filenames] - ) - - # If any of the specs expanded to an empty list then raise an error - all_expanded = glob_expanded.values() - - if not all(all_expanded): - msg = "One or more of the files specified did not exist:" - for pattern, expanded in glob_expanded.items(): - if expanded: - msg += '\n - "{}" matched {} file(s)'.format( - pattern, len(expanded) - ) - else: - msg += '\n * "{}" didn\'t match any files'.format(pattern) - raise IOError(msg) + if files_expected: + # Try to expand all filenames as globs + glob_expanded = OrderedDict( + [[fn, sorted(glob.glob(fn))] for fn in filenames] + ) - return [fname for fnames in all_expanded for fname in fnames] + # If any of the specs expanded to an empty list then raise an error + all_expanded = glob_expanded.values() + if not all(all_expanded): + msg = "One or more of the files specified did not exist:" + for pattern, expanded in glob_expanded.items(): + if expanded: + msg += '\n - "{}" matched {} file(s)'.format( + pattern, len(expanded) + ) + else: + msg += '\n * "{}" didn\'t match any files'.format( + pattern + ) + raise IOError(msg) + result = [fname for fnames in all_expanded for fname in fnames] + else: + result = filenames + return result def load_files(filenames, callback, constraints=None): @@ -216,7 +226,7 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): """ - Takes a list of urls and a callback function, and returns a generator + Takes a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. .. note:: @@ -226,11 +236,11 @@ def load_http(urls, callback): """ # Create default dict mapping iris format handler to its associated filenames + from iris.fileformats import FORMAT_AGENT + handler_map = collections.defaultdict(list) for url in urls: - handling_format_spec = iris.fileformats.FORMAT_AGENT.get_spec( - url, None - ) + handling_format_spec = FORMAT_AGENT.get_spec(url, None) handler_map[handling_format_spec].append(url) # Call each iris format handler with the appropriate filenames @@ -356,65 +366,64 @@ def save(source, target, saver=None, **kwargs): A custom saver can be provided to the function to write to a different file format. - Args: - - * source: - :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or - sequence of cubes. - * target: - A filename (or writeable, depending on file format). + Parameters + ---------- + source : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` + target : str or pathlib.PurePath or io.TextIOWrapper When given a filename or file, Iris can determine the - file format. Filename can be given as a string or - :class:`pathlib.PurePath`. - - Kwargs: - - * saver: - Optional. Specifies the file format to save. + file format. + saver : str or function, optional + Specifies the file format to save. If omitted, Iris will attempt to determine the format. - If a string, this is the recognised filename extension (where the actual filename may not have it). + Otherwise the value is a saver function, of the form: ``my_saver(cube, target)`` plus any custom keywords. It is assumed that a saver will accept an ``append`` keyword - if it's file format can handle multiple cubes. See also + if its file format can handle multiple cubes. See also :func:`iris.io.add_saver`. 
+ **kwargs : dict, optional + All other keywords are passed through to the saver function; see the + relevant saver documentation for more information on keyword arguments. - All other keywords are passed through to the saver function; see the - relevant saver documentation for more information on keyword arguments. - - Examples:: + Warnings + -------- + Saving a cube whose data has been loaded lazily + (if `cube.has_lazy_data()` returns `True`) to the same file it expects + to load data from will cause both the data in-memory and the data on + disk to be lost. - # Save a cube to PP - iris.save(my_cube, "myfile.pp") + .. code-block:: python - # Save a cube list to a PP file, appending to the contents of the file - # if it already exists - iris.save(my_cube_list, "myfile.pp", append=True) + cube = iris.load_cube("somefile.nc") + # The next line causes data loss in 'somefile.nc' and the cube. + iris.save(cube, "somefile.nc") - # Save a cube to netCDF, defaults to NETCDF4 file format - iris.save(my_cube, "myfile.nc") + In general, overwriting a file which is the source for any lazily loaded + data can result in corruption. Users should proceed with caution when + attempting to overwrite an existing file. - # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option - iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC") + Examples + -------- + >>> # Setting up + >>> import iris + >>> my_cube = iris.load_cube(iris.sample_data_path('air_temp.pp')) + >>> my_cube_list = iris.load(iris.sample_data_path('space_weather.nc')) - .. warning:: + >>> # Save a cube to PP + >>> iris.save(my_cube, "myfile.pp") - Saving a cube whose data has been loaded lazily - (if `cube.has_lazy_data()` returns `True`) to the same file it expects - to load data from will cause both the data in-memory and the data on - disk to be lost. + >>> # Save a cube list to a PP file, appending to the contents of the file + >>> # if it already exists + >>> iris.save(my_cube_list, "myfile.pp", append=True) - .. code-block:: python + >>> # Save a cube to netCDF, defaults to NETCDF4 file format + >>> iris.save(my_cube, "myfile.nc") - cube = iris.load_cube("somefile.nc") - # The next line causes data loss in 'somefile.nc' and the cube. - iris.save(cube, "somefile.nc") + >>> # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option + >>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC") - In general, overwriting a file which is the source for any lazily loaded - data can result in corruption. Users should proceed with caution when - attempting to overwrite an existing file. 
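+
+    One way to avoid the lazy-overwrite pitfall described above, assuming
+    the data fits in memory, is to realise it before saving over the
+    source::
+
+        cube = iris.load_cube("somefile.nc")
+        cube.data  # realise the lazy data in memory
+        iris.save(cube, "somefile.nc")  # now safe to overwrite the source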
""" from iris.cube import Cube, CubeList @@ -423,6 +432,8 @@ def save(source, target, saver=None, **kwargs): if isinstance(target, pathlib.PurePath): target = str(target) if isinstance(target, str) and saver is None: + # Converts tilde or wildcards to absolute path + (target,) = expand_filespecs([str(target)], False) saver = find_saver(target) elif hasattr(target, "name") and saver is None: saver = find_saver(target.name) diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 4c421792a7..b00eb3f117 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -11,6 +11,8 @@ """ import datetime +from itertools import chain, combinations +import warnings import cf_units from cf_units import Unit @@ -25,69 +27,146 @@ from pandas.tseries.index import DatetimeIndex # pandas <0.20 import iris -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube +from iris._deprecation import warn_deprecated +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord +from iris.cube import Cube, CubeList -def _add_iris_coord(cube, name, points, dim, calendar=None): +def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): """ - Add a Coord to a Cube from a Pandas index or columns array. + Create a Coord or other dimensional metadata from a Pandas index or columns array. - If no calendar is specified for a time series, Gregorian is assumed. + If no calendar is specified for a time series, Standard is assumed. """ units = Unit("unknown") if calendar is None: - calendar = cf_units.CALENDAR_GREGORIAN + calendar = cf_units.CALENDAR_STANDARD + + # Getting everything into a single datetime format is hard! + + # Convert out of NumPy's own datetime format. + if np.issubdtype(values.dtype, np.datetime64): + values = pandas.to_datetime(values) - # Convert pandas datetime objects to python datetime obejcts. - if isinstance(points, DatetimeIndex): - points = np.array([i.to_pydatetime() for i in points]) + # Convert pandas datetime objects to python datetime objects. + if isinstance(values, DatetimeIndex): + values = np.array([i.to_pydatetime() for i in values]) # Convert datetime objects to Iris' current datetime representation. - if points.dtype == object: + if values.dtype == object: dt_types = (datetime.datetime, cftime.datetime) - if all([isinstance(i, dt_types) for i in points]): + if all([isinstance(i, dt_types) for i in values]): units = Unit("hours since epoch", calendar=calendar) - points = units.date2num(points) - - points = np.array(points) - if np.issubdtype(points.dtype, np.number) and iris.util.monotonic( - points, strict=True - ): - coord = DimCoord(points, units=units) - coord.rename(name) + values = units.date2num(values) + + values = np.array(values) + + if dm_class is None: + if np.issubdtype(values.dtype, np.number) and iris.util.monotonic( + values, strict=True + ): + dm_class = DimCoord + else: + dm_class = AuxCoord + + instance = dm_class(values, units=units) + if name is not None: + # Use rename() to attempt standard_name but fall back on long_name. + instance.rename(str(name)) + + return instance + + +def _add_iris_coord(cube, name, points, dim, calendar=None): + """ + Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array. + """ + # Most functionality has been abstracted to _get_dimensional_metadata, + # allowing re-use in as_cube() and as_cubes(). 
+ coord = _get_dimensional_metadata(name, points, calendar) + + if coord.__class__ == DimCoord: cube.add_dim_coord(coord, dim) else: - coord = AuxCoord(points, units=units) - coord.rename(name) cube.add_aux_coord(coord, dim) -def as_cube(pandas_array, copy=True, calendars=None): +def _series_index_unique(pandas_series: pandas.Series): """ - Convert a Pandas array into an Iris cube. + Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. - Args: + Iterates through grouping single index levels, then combinations of 2 + levels, then 3 etcetera, until single :class:`~pandas.Series` values per + group are found. Returns a ``tuple`` of the index levels that group to + produce single values, as soon as one is found. - * pandas_array - A Pandas Series or DataFrame. + Returns ``None`` if no index level combination produces single values. - Kwargs: + """ + unique_number = pandas_series.nunique() + pandas_index = pandas_series.index + levels_range = range(pandas_index.nlevels) + if unique_number == 1: + # Scalar - identical for all indices. + result = () + else: + result = None + levels_combinations = chain( + *[ + combinations(levels_range, levels + 1) + for levels in levels_range + ] + ) + for lc in levels_combinations: + if pandas_series.groupby(level=lc).nunique().max() == 1: + result = lc + # Escape as early as possible - heavy operation. + break + return result + + +def as_cube( + pandas_array, + copy=True, + calendars=None, +): + """ + Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. + + .. deprecated:: 3.3.0 + + This function is scheduled for removal in a future release, being + replaced by :func:`iris.pandas.as_cubes`, which offers richer + dimensional intelligence. - * copy - Whether to make a copy of the data. - Defaults to True. + Parameters + ---------- + pandas_array : :class:`pandas.Series` or :class:`pandas.DataFrame` + The Pandas object to convert + copy : bool, default=True + Whether to copy `pandas_array`, or to create array views where + possible. Provided in case of memory limit concerns. + calendars : dict, optional + A dict mapping a dimension to a calendar. Required to convert datetime + indices/columns. - * calendars - A dict mapping a dimension to a calendar. - Required to convert datetime indices/columns. + Notes + ----- + This function will copy your data by default. Example usage:: as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) - as_cube(data_frame, calendars={1: cf_units.CALENDAR_GREGORIAN}) - - .. note:: This function will copy your data by default. + as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD}) """ + message = ( + "iris.pandas.as_cube has been deprecated, and will be removed in a " + "future release. Please use iris.pandas.as_cubes instead." + ) + warn_deprecated(message) + calendars = calendars or {} if pandas_array.ndim not in [1, 2]: raise ValueError( @@ -116,6 +195,302 @@ def as_cube(pandas_array, copy=True, calendars=None): return cube +def as_cubes( + pandas_structure, + copy=True, + calendars=None, + aux_coord_cols=None, + cell_measure_cols=None, + ancillary_variable_cols=None, +): + """ + Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. + + The index of `pandas_structure` will be used for generating the + :class:`~iris.cube.Cube` dimension(s) and :class:`~iris.coords.DimCoord`\\ s. + Other dimensional metadata may span multiple dimensions - based on how the + column values vary with the index values. 
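The "how column values vary with the index" test is the level-combination grouping performed by `_series_index_unique` above. A small self-contained illustration of that check; the column and level names are invented for the example:

```python
import pandas as pd

df = pd.DataFrame(
    {
        "height": [0, 0, 100, 100],
        "latitude": [25, 35, 25, 35],
        "in_region": [True, False, True, False],
    }
).set_index(["height", "latitude"])

# "in_region" varies with latitude only, so grouping on that single
# index level yields exactly one unique value per group - the same
# criterion _series_index_unique applies, combination by combination.
print(df["in_region"].groupby(level=[1]).nunique().max())  # -> 1
```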
+ + Parameters + ---------- + pandas_structure : :class:`pandas.Series` or :class:`pandas.DataFrame` + The Pandas object to convert + copy : bool, default=True + Whether the Cube :attr:`~iris.cube.Cube.data` is a copy of the + `pandas_structure` column, or a view of the same array. Arrays other than + the data (coords etc.) are always copies. This option is provided to + help with memory size concerns. + calendars : dict, optional + Calendar conversions for individual date-time coordinate + columns/index-levels e.g. ``{"my_column": cf_units.CALENDAR_360_DAY}``. + aux_coord_cols, cell_measure_cols, ancillary_variable_cols : list of str, optional + Names of columns to be converted into :class:`~iris.coords.AuxCoord`, + :class:`~iris.coords.CellMeasure` and + :class:`~iris.coords.AncillaryVariable` objects. + + Returns + -------- + :class:`~iris.cube.CubeList` + One :class:`~iris.cube.Cube` for each column not referenced in + `aux_coord_cols`/`cell_measure_cols`/`ancillary_variable_cols`. + + Notes + ----- + A :class:`~pandas.DataFrame` using columns as a second data dimension will + need to be 'melted' before conversion. See the Examples for how. + + Dask ``DataFrame``\\s are not supported. + + Examples + -------- + >>> from iris.pandas import as_cubes + >>> import numpy as np + >>> from pandas import DataFrame, Series + + Converting a simple :class:`~pandas.Series` : + + >>> my_series = Series([300, 301, 302], name="air_temperature") + >>> converted_cubes = as_cubes(my_series) + >>> print(converted_cubes) + 0: air_temperature / (unknown) (unknown: 3) + >>> print(converted_cubes[0]) + air_temperature / (unknown) (unknown: 3) + Dimension coordinates: + unknown x + + A :class:`~pandas.DataFrame`, with a custom index becoming the + :class:`~iris.coords.DimCoord` : + + >>> my_df = DataFrame({ + ... "air_temperature": [300, 301, 302], + ... "longitude": [30, 40, 50] + ... }) + >>> my_df = my_df.set_index("longitude") + >>> converted_cubes = as_cubes(my_df) + >>> print(converted_cubes[0]) + air_temperature / (unknown) (longitude: 3) + Dimension coordinates: + longitude x + + A :class:`~pandas.DataFrame` representing two 3-dimensional datasets, + including a 2-dimensional :class:`~iris.coords.AuxCoord` : + + >>> my_df = DataFrame({ + ... "air_temperature": np.arange(300, 312, 1), + ... "air_pressure": np.arange(1000, 1012, 1), + ... "longitude": [0, 10] * 6, + ... "latitude": [25, 25, 35, 35] * 3, + ... "height": ([0] * 4) + ([100] * 4) + ([200] * 4), + ... "in_region": [True, False, False, False] * 3 + ... 
}) + >>> print(my_df) + air_temperature air_pressure longitude latitude height in_region + 0 300 1000 0 25 0 True + 1 301 1001 10 25 0 False + 2 302 1002 0 35 0 False + 3 303 1003 10 35 0 False + 4 304 1004 0 25 100 True + 5 305 1005 10 25 100 False + 6 306 1006 0 35 100 False + 7 307 1007 10 35 100 False + 8 308 1008 0 25 200 True + 9 309 1009 10 25 200 False + 10 310 1010 0 35 200 False + 11 311 1011 10 35 200 False + >>> my_df = my_df.set_index(["longitude", "latitude", "height"]) + >>> my_df = my_df.sort_index() + >>> converted_cubes = as_cubes(my_df, aux_coord_cols=["in_region"]) + >>> print(converted_cubes) + 0: air_temperature / (unknown) (longitude: 2; latitude: 2; height: 3) + 1: air_pressure / (unknown) (longitude: 2; latitude: 2; height: 3) + >>> print(converted_cubes[0]) + air_temperature / (unknown) (longitude: 2; latitude: 2; height: 3) + Dimension coordinates: + longitude x - - + latitude - x - + height - - x + Auxiliary coordinates: + in_region x x - + + Pandas uses ``NaN`` rather than masking data. Converted + :class:`~iris.cube.Cube`\\s can be masked in downstream user code : + + >>> my_series = Series([300, np.NaN, 302], name="air_temperature") + >>> converted_cube = as_cubes(my_series)[0] + >>> print(converted_cube.data) + [300. nan 302.] + >>> converted_cube.data = np.ma.masked_invalid(converted_cube.data) + >>> print(converted_cube.data) + [300.0 -- 302.0] + + If the :class:`~pandas.DataFrame` uses columns as a second dimension, + :func:`pandas.melt` should be used to convert the data to the expected + n-dimensional format : + + >>> my_df = DataFrame({ + ... "latitude": [35, 25], + ... 0: [300, 301], + ... 10: [302, 303], + ... }) + >>> print(my_df) + latitude 0 10 + 0 35 300 302 + 1 25 301 303 + >>> my_df = my_df.melt( + ... id_vars=["latitude"], + ... value_vars=[0, 10], + ... var_name="longitude", + ... value_name="air_temperature" + ... ) + >>> print(my_df) + latitude longitude air_temperature + 0 35 0 300 + 1 25 0 301 + 2 35 10 302 + 3 25 10 303 + >>> my_df = my_df.set_index(["latitude", "longitude"]) + >>> my_df = my_df.sort_index() + >>> converted_cube = as_cubes(my_df)[0] + >>> print(converted_cube) + air_temperature / (unknown) (latitude: 2; longitude: 2) + Dimension coordinates: + latitude x - + longitude - x + + """ + if pandas_structure.empty: + return CubeList() + + calendars = calendars or {} + aux_coord_cols = aux_coord_cols or [] + cell_measure_cols = cell_measure_cols or [] + ancillary_variable_cols = ancillary_variable_cols or [] + + is_series = isinstance(pandas_structure, pandas.Series) + + if copy: + pandas_structure = pandas_structure.copy() + + pandas_index = pandas_structure.index + if not pandas_index.is_unique: + message = ( + f"DataFrame index ({pandas_index.names}) is not unique per " + "row; cannot be used for DimCoords." + ) + raise ValueError(message) + + if not pandas_index.is_monotonic: + # Need monotonic index for use in DimCoord(s). + # This function doesn't sort_index itself since that breaks the + # option to return a data view instead of a copy. + message = ( + "Pandas index is not monotonic. Consider using the " + "sort_index() method before passing in." + ) + raise ValueError(message) + + cube_shape = getattr(pandas_index, "levshape", (pandas_index.nunique(),)) + n_rows = len(pandas_structure) + if np.product(cube_shape) > n_rows: + message = ( + f"Not all index values have a corresponding row - {n_rows} rows " + f"cannot be reshaped into {cube_shape}. Consider padding with NaN " + "rows where needed." 
+ ) + raise ValueError(message) + + cube_kwargs = {} + + def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): + # Common convenience to get the right DM in the right format for + # Cube creation. + calendar = calendars.get(name_) + instance = _get_dimensional_metadata( + name_, values_, calendar, dm_class_ + ) + return (instance, dimensions_) + + # DimCoords. + dim_coord_kwarg = [] + for ix, dim_name in enumerate(pandas_index.names): + if hasattr(pandas_index, "levels"): + coord_points = pandas_index.levels[ix] + else: + coord_points = pandas_index + new_dim_coord = format_dimensional_metadata( + DimCoord, coord_points, dim_name, ix + ) + dim_coord_kwarg.append(new_dim_coord) + cube_kwargs["dim_coords_and_dims"] = dim_coord_kwarg + + # Other dimensional metadata. + class_arg_mapping = [ + (AuxCoord, aux_coord_cols, "aux_coords_and_dims"), + (CellMeasure, cell_measure_cols, "cell_measures_and_dims"), + ( + AncillaryVariable, + ancillary_variable_cols, + "ancillary_variables_and_dims", + ), + ] + + if is_series: + columns_ignored = any([len(t[1]) > 0 for t in class_arg_mapping]) + if columns_ignored: + ignored_args = ", ".join([t[2] for t in class_arg_mapping]) + message = f"The input pandas_structure is a Series; ignoring arguments: {ignored_args} ." + warnings.warn(message) + class_arg_mapping = [] + + non_data_names = [] + for dm_class, column_names, kwarg in class_arg_mapping: + class_kwarg = [] + non_data_names.extend(column_names) + for column_name in column_names: + column = pandas_structure[column_name] + + # Should be impossible for None to be returned - would require a + # non-unique index, which we protect against. + dimensions = _series_index_unique(column) + + content = column.to_numpy() + # Remove duplicate entries to get down to the correct dimensions + # for this object. _series_index_unique should have ensured + # that we are indeed removing the duplicates. + shaped = content.reshape(cube_shape) + indices = [0] * len(cube_shape) + for dim in dimensions: + indices[dim] = slice(None) + collapsed = shaped[tuple(indices)] + + new_dm = format_dimensional_metadata( + dm_class, collapsed, column_name, dimensions + ) + class_kwarg.append(new_dm) + + cube_kwargs[kwarg] = class_kwarg + + # Cube creation. + if is_series: + data_series_list = [pandas_structure] + else: + data_series_list = [ + pandas_structure[column_name] + for column_name in pandas_structure.columns + if column_name not in non_data_names + ] + cubes = CubeList() + for data_series in data_series_list: + cube_data = data_series.to_numpy().reshape(cube_shape) + new_cube = Cube(cube_data, **cube_kwargs) + if data_series.name is not None: + # Use rename() to attempt standard_name but fall back on long_name. 
+            new_cube.rename(str(data_series.name))
+        cubes.append(new_cube)
+
+    return cubes
+
+
 def _as_pandas_coord(coord):
     """Convert an Iris Coord into a Pandas index or columns array."""
     index = coord.points
diff --git a/lib/iris/plot.py b/lib/iris/plot.py
index 0e9645c783..2da91e8c67 100644
--- a/lib/iris/plot.py
+++ b/lib/iris/plot.py
@@ -587,14 +587,14 @@ def _fixup_dates(coord, values):
     # Convert coordinate values into tuples of
     # (year, month, day, hour, min, sec)
     dates = [coord.units.num2date(val).timetuple()[0:6] for val in values]
-    if coord.units.calendar == "gregorian":
+    if coord.units.calendar == "standard":
         r = [datetime.datetime(*date) for date in dates]
     else:
         try:
-            import nc_time_axis
+            import nc_time_axis  # noqa: F401
         except ImportError:
             msg = (
-                "Cannot plot against time in a non-gregorian "
+                "Cannot plot against time in a non-standard "
                 'calendar, because "nc_time_axis" is not available : '
                 "Install the package from "
                 "https://github.com/SciTools/nc-time-axis to enable "
@@ -603,12 +603,10 @@ def _fixup_dates(coord, values):
             raise IrisError(msg)
 
         r = [
-            nc_time_axis.CalendarDateTime(
-                cftime.datetime(*date, calendar=coord.units.calendar),
-                coord.units.calendar,
-            )
+            cftime.datetime(*date, calendar=coord.units.calendar)
             for date in dates
         ]
+
     values = np.empty(len(r), dtype=object)
     values[:] = r
     return values
@@ -647,20 +645,43 @@ def _u_object_from_v_object(v_object):
 
 
 def _get_plot_objects(args):
-    if len(args) > 1 and isinstance(
+    if len(args) > 2 and isinstance(
+        args[2], (iris.cube.Cube, iris.coords.Coord)
+    ):
+        # three arguments
+        u_object, v_object1, v_object2 = args[:3]
+        u1, v1 = _uv_from_u_object_v_object(u_object, v_object1)
+        _, v2 = _uv_from_u_object_v_object(u_object, v_object2)
+        args = args[3:]
+        if u1.size != v1.size or u1.size != v2.size:
+            msg = "The x and y-axis objects are not all compatible. They should have equal sizes but got ({}: {}), ({}: {}) and ({}: {})."
+            raise ValueError(
+                msg.format(
+                    u_object.name(),
+                    u1.size,
+                    v_object1.name(),
+                    v1.size,
+                    v_object2.name(),
+                    v2.size,
+                )
+            )
+        u = u1
+        v = (v1, v2)
+        v_object = (v_object1, v_object2)
+    elif len(args) > 1 and isinstance(
         args[1], (iris.cube.Cube, iris.coords.Coord)
     ):
         # two arguments
         u_object, v_object = args[:2]
         u, v = _uv_from_u_object_v_object(u_object, v_object)
         args = args[2:]
-        if len(u) != len(v):
+        if u.size != v.size:
             msg = (
                 "The x and y-axis objects are not compatible. They should "
                 "have equal sizes but got ({}: {}) and ({}: {})."
             )
             raise ValueError(
-                msg.format(u_object.name(), len(u), v_object.name(), len(v))
+                msg.format(u_object.name(), u.size, v_object.name(), v.size)
             )
     else:
         # single argument
@@ -675,7 +696,7 @@ def _get_plot_objects(args):
     if (
         isinstance(v_object, iris.cube.Cube)
         and isinstance(u_object, iris.coords.Coord)
-        and iris.util.guess_coord_axis(u_object) in ["Y", "Z"]
+        and iris.util.guess_coord_axis(u_object) == "Z"
     ):
         u_object, v_object = v_object, u_object
         u, v = v, u
@@ -825,6 +846,52 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs):
     return result
 
 
+def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs):
+    """
+    Equivalent to _draw_1d_from_points, but expects two y-axis variables
+    rather than one (as required for .fill_between). It cannot be used
+    where the y-axis variables are string coordinates. The y-axis variable
+    provided first takes precedence where the two differ on whether the
+    axis should be inverted or whether a map should be drawn.
+
+    """
+    # NB.
In the interests of clarity we use "u" to refer to the horizontal + # axes on the matplotlib plot and "v" for the vertical axes. + + # retrieve the objects that are plotted on the horizontal and vertical + # axes (cubes or coordinates) and their respective values, along with the + # argument tuple with these objects removed + u_object, v_objects, u, vs, args = _get_plot_objects(args) + + v_object1, _ = v_objects + v1, v2 = vs + + # if both u_object and v_object are coordinates then check if a map + # should be drawn + if ( + isinstance(u_object, iris.coords.Coord) + and isinstance(v_object1, iris.coords.Coord) + and _can_draw_map([v_object1, u_object]) + ): + # Replace non-cartopy subplot/axes with a cartopy alternative and set + # the transform keyword. + kwargs = _ensure_cartopy_axes_and_determine_kwargs( + u_object, v_object1, kwargs + ) + + axes = kwargs.pop("axes", None) + draw_method = getattr(axes if axes else plt, draw_method_name) + if arg_func is not None: + args, kwargs = arg_func(u, v1, v2, *args, **kwargs) + result = draw_method(*args, **kwargs) + else: + result = draw_method(u, v1, v2, *args, **kwargs) + + # Invert y-axis if necessary. + _invert_yaxis(v_object1, axes) + + return result + + def _replace_axes_with_cartopy_axes(cartopy_proj): """ Replace non-cartopy subplot/axes with a cartopy alternative @@ -845,7 +912,9 @@ def _replace_axes_with_cartopy_axes(cartopy_proj): ylabel=ax.get_ylabel(), ) else: + position = ax.get_position(original=True) _ = fig.add_axes( + position, projection=cartopy_proj, title=ax.get_title(), xlabel=ax.get_xlabel(), @@ -978,16 +1047,28 @@ def _map_common( # is useful in anywhere other than this plotting routine, it may be better # placed in the CS. if getattr(x_coord, "circular", False): + original_length = y.shape[1] _, direction = iris.util.monotonic( x_coord.points, return_direction=True ) y = np.append(y, y[:, 0:1], axis=1) x = np.append(x, x[:, 0:1] + 360 * direction, axis=1) data = ma.concatenate([data, data[:, 0:1]], axis=1) - if "_v_data" in kwargs: - v_data = kwargs["_v_data"] - v_data = ma.concatenate([v_data, v_data[:, 0:1]], axis=1) - kwargs["_v_data"] = v_data + + # Having extended the data, we also need to extend extra kwargs for + # matplotlib (e.g. point colours) + for key, val in kwargs.items(): + try: + val_arr = np.array(val) + except TypeError: + continue + if val_arr.ndim >= 2 and val_arr.shape[1] == original_length: + # Concatenate the first column to the end of the data then + # update kwargs + val_arr = ma.concatenate( + [val_arr, val_arr[:, 0:1, ...]], axis=1 + ) + kwargs[key] = val_arr # Replace non-cartopy subplot/axes with a cartopy alternative and set the # transform keyword. @@ -1587,6 +1668,45 @@ def scatter(x, y, *args, **kwargs): return _draw_1d_from_points("scatter", _plot_args, *args, **kwargs) +def fill_between(x, y1, y2, *args, **kwargs): + """ + Plots y1 and y2 against x, and fills the space between them. + + Args: + + * x: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + A cube or a coordinate to plot on the x-axis. + + * y1: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + First cube or a coordinate to plot on the y-axis. + + * y2: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + Second cube or a coordinate to plot on the y-axis. + + Kwargs: + + * axes: :class:`matplotlib.axes.Axes` + The axes to use for drawing. Defaults to the current axes if none + provided. + + See :func:`matplotlib.pyplot.fill_between` for details of additional valid + keyword arguments. 
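+
+    For example, to shade the band between two 1D cubes that share an
+    x-coordinate ("low" and "high" are assumed to be such cubes)::
+
+        import iris.plot as iplt
+        import matplotlib.pyplot as plt
+
+        time = low.coord("time")
+        iplt.fill_between(time, low, high, alpha=0.3)
+        plt.show()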
+
+    """
+    # here we are more specific about argument types than generic 1d plotting
+    if not isinstance(x, (iris.cube.Cube, iris.coords.Coord)):
+        raise TypeError("x must be a cube or a coordinate.")
+    if not isinstance(y1, (iris.cube.Cube, iris.coords.Coord)):
+        raise TypeError("y1 must be a cube or a coordinate.")
+    if not isinstance(y2, (iris.cube.Cube, iris.coords.Coord)):
+        raise TypeError("y2 must be a cube or a coordinate.")
+    args = (x, y1, y2) + args
+    _plot_args = None
+    return _draw_two_1d_from_points(
+        "fill_between", _plot_args, *args, **kwargs
+    )
+
+
 # Provide convenience show method from pyplot
 show = plt.show
diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py
index 2c4a94b1d0..18ed2554a3 100644
--- a/lib/iris/quickplot.py
+++ b/lib/iris/quickplot.py
@@ -71,7 +71,7 @@ def _label(cube, mode, result=None, ndims=2, coords=None, axes=None):
     if result is not None:
         draw_edges = mode == iris.coords.POINT_MODE
         bar = plt.colorbar(
-            result, orientation="horizontal", drawedges=draw_edges
+            result, ax=axes, orientation="horizontal", drawedges=draw_edges
         )
         has_known_units = not (
             cube.units.is_unknown() or cube.units.is_no_unit()
@@ -311,5 +311,19 @@ def scatter(x, y, *args, **kwargs):
     return result
 
 
+def fill_between(x, y1, y2, *args, **kwargs):
+    """
+    Draws a labelled fill_between plot based on the given cubes or coordinates.
+
+    See :func:`iris.plot.fill_between` for details of valid arguments and
+    keyword arguments.
+
+    """
+    axes = kwargs.get("axes")
+    result = iplt.fill_between(x, y1, y2, *args, **kwargs)
+    _label_1d_plot(x, y1, axes=axes)
+    return result
+
+
 # Provide a convenience show method from pyplot.
 show = plt.show
diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py
index c1df4f628b..4840de8cdb 100644
--- a/lib/iris/tests/__init__.py
+++ b/lib/iris/tests/__init__.py
@@ -11,15 +11,8 @@
 
 The primary class for this module is :class:`IrisTest`.
 
-By default, this module sets the matplotlib backend to "agg". But when
-this module is imported it checks ``sys.argv`` for the flag "-d". If
-found, it is removed from ``sys.argv`` and the matplotlib backend is
-switched to "tkagg" to allow the interactive visual inspection of
-graphical test results.
-
 """
 
-import codecs
 import collections
 from collections.abc import Mapping
 import contextlib
@@ -34,44 +27,29 @@
 import math
 import os
 import os.path
+from pathlib import Path
 import re
 import shutil
 import subprocess
 import sys
-import threading
-from typing import Dict, List
+from typing import AnyStr
 import unittest
 from unittest import mock
 import warnings
 import xml.dom.minidom
 import zlib
 
-import filelock
 import numpy as np
 import numpy.ma as ma
 import requests
 
 import iris.config
 import iris.cube
+import iris.tests.graphics as graphics
 import iris.util
 
-# Test for availability of matplotlib.
-# (And remove matplotlib as an iris.tests dependency.)
-try:
-    import matplotlib
-
-    # Override any user settings e.g. from matplotlibrc file.
-    matplotlib.rcdefaults()
-    # Set backend *after* rcdefaults, as we don't want that overridden (#3846).
-    matplotlib.use("agg")
-    # Standardise the figure size across matplotlib versions.
-    # This permits matplotlib png image comparison.
-    matplotlib.rcParams["figure.figsize"] = [8.0, 6.0]
-    import matplotlib.pyplot as plt
-except ImportError:
-    MPL_AVAILABLE = False
-else:
-    MPL_AVAILABLE = True
+MPL_AVAILABLE = graphics.MPL_AVAILABLE
+
 
 try:
     from osgeo import gdal  # noqa
@@ -111,10 +89,6 @@
 
 #: Basepath for test results.
_RESULT_PATH = os.path.join(os.path.dirname(__file__), "results") -#: Default perceptual hash size. -_HASH_SIZE = 16 -#: Default maximum perceptual hash hamming distance. -_HAMMING_DISTANCE = 2 if "--data-files-used" in sys.argv: sys.argv.remove("--data-files-used") @@ -131,18 +105,6 @@ os.environ["IRIS_TEST_CREATE_MISSING"] = "true" -# Whether to display matplotlib output to the screen. -_DISPLAY_FIGURES = False - -if MPL_AVAILABLE and "-d" in sys.argv: - sys.argv.remove("-d") - plt.switch_backend("tkagg") - _DISPLAY_FIGURES = True - -# Threading non re-entrant blocking lock to ensure thread-safe plotting. -_lock = threading.Lock() - - def main(): """A wrapper for unittest.main() which adds iris.test specific options to the help (-h) output.""" if "-h" in sys.argv or "--help" in sys.argv: @@ -179,41 +141,71 @@ def main(): unittest.main() -def get_data_path(relative_path): +def _assert_masked_array(assertion, a, b, strict, **kwargs): + # Compare masks. + a_mask, b_mask = ma.getmaskarray(a), ma.getmaskarray(b) + np.testing.assert_array_equal(a_mask, b_mask) + + if strict: + # Compare all data values. + assertion(a.data, b.data, **kwargs) + else: + # Compare only unmasked data values. + assertion( + ma.compressed(a), + ma.compressed(b), + **kwargs, + ) + + +def assert_masked_array_equal(a, b, strict=False): + """ + Check that masked arrays are equal. This requires the + unmasked values and masks to be identical. + + Args: + + * a, b (array-like): + Two arrays to compare. + + Kwargs: + + * strict (bool): + If True, perform a complete mask and data array equality check. + If False (default), the data array equality considers only unmasked + elements. + """ - Return the absolute path to a data file when given the relative path - as a string, or sequence of strings. + _assert_masked_array(np.testing.assert_array_equal, a, b, strict) + + +def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): + """ + Check that masked arrays are almost equal. This requires the + masks to be identical, and the unmasked values to be almost + equal. + + Args: + + * a, b (array-like): + Two arrays to compare. + + Kwargs: + + * strict (bool): + If True, perform a complete mask and data array equality check. + If False (default), the data array equality considers only unmasked + elements. + + * decimal (int): + Equality tolerance level for + :meth:`numpy.testing.assert_array_almost_equal`, with the meaning + 'abs(desired-actual) < 0.5 * 10**(-decimal)' """ - if not isinstance(relative_path, str): - relative_path = os.path.join(*relative_path) - test_data_dir = iris.config.TEST_DATA_DIR - if test_data_dir is None: - test_data_dir = "" - data_path = os.path.join(test_data_dir, relative_path) - - if _EXPORT_DATAPATHS_FILE is not None: - _EXPORT_DATAPATHS_FILE.write(data_path + "\n") - - if isinstance(data_path, str) and not os.path.exists(data_path): - # if the file is gzipped, ungzip it and return the path of the ungzipped - # file. - gzipped_fname = data_path + ".gz" - if os.path.exists(gzipped_fname): - with gzip.open(gzipped_fname, "rb") as gz_fh: - try: - with open(data_path, "wb") as fh: - fh.writelines(gz_fh) - except IOError: - # Put ungzipped data file in a temporary path, since we - # can't write to the original path (maybe it is owned by - # the system.) 
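The module-level helpers above are usable outside of `IrisTest` subclasses as well. A quick sketch of the default, non-strict behaviour:

```python
import numpy.ma as ma

from iris.tests import assert_masked_array_equal

a = ma.masked_array([1, 2, 3], mask=[False, True, False])
b = ma.masked_array([1, 99, 3], mask=[False, True, False])

# Passes: the masks match and the unmasked values match. The differing
# masked payloads (2 vs 99) only matter with strict=True, which would fail.
assert_masked_array_equal(a, b)
```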
- _, ext = os.path.splitext(data_path) - data_path = iris.util.create_temp_filename(suffix=ext) - with open(data_path, "wb") as fh: - fh.writelines(gz_fh) - - return data_path + _assert_masked_array( + np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal + ) class IrisTest_nometa(unittest.TestCase): @@ -221,11 +213,6 @@ class IrisTest_nometa(unittest.TestCase): _assertion_counts = collections.defaultdict(int) - @classmethod - def setUpClass(cls): - # Ensure that the CF profile if turned-off for testing. - iris.site_configuration["cf_profile"] = None - def _assert_str_same( self, reference_str, @@ -250,6 +237,43 @@ def _assert_str_same( % (type_comparison_name, reference_filename, diff) ) + @staticmethod + def get_data_path(relative_path): + """ + Return the absolute path to a data file when given the relative path + as a string, or sequence of strings. + + """ + if not isinstance(relative_path, str): + relative_path = os.path.join(*relative_path) + test_data_dir = iris.config.TEST_DATA_DIR + if test_data_dir is None: + test_data_dir = "" + data_path = os.path.join(test_data_dir, relative_path) + + if _EXPORT_DATAPATHS_FILE is not None: + _EXPORT_DATAPATHS_FILE.write(data_path + "\n") + + if isinstance(data_path, str) and not os.path.exists(data_path): + # if the file is gzipped, ungzip it and return the path of the ungzipped + # file. + gzipped_fname = data_path + ".gz" + if os.path.exists(gzipped_fname): + with gzip.open(gzipped_fname, "rb") as gz_fh: + try: + with open(data_path, "wb") as fh: + fh.writelines(gz_fh) + except IOError: + # Put ungzipped data file in a temporary path, since we + # can't write to the original path (maybe it is owned by + # the system.) + _, ext = os.path.splitext(data_path) + data_path = iris.util.create_temp_filename(suffix=ext) + with open(data_path, "wb") as fh: + fh.writelines(gz_fh) + + return data_path + @staticmethod def get_result_path(relative_path): """ @@ -261,25 +285,6 @@ def get_result_path(relative_path): relative_path = os.path.join(*relative_path) return os.path.abspath(os.path.join(_RESULT_PATH, relative_path)) - def assertStringEqual( - self, reference_str, test_str, type_comparison_name="strings" - ): - if reference_str != test_str: - diff = "\n".join( - difflib.unified_diff( - reference_str.splitlines(), - test_str.splitlines(), - "Reference", - "Test result", - "", - "", - 0, - ) - ) - self.fail( - "{} do not match:\n{}".format(type_comparison_name, diff) - ) - def result_path(self, basename=None, ext=""): """ Return the full path to a test result, generated from the \ @@ -373,8 +378,8 @@ def assertCDL(self, netcdf_filename, reference_filename=None, flags="-h"): flags = list(map(str, flags)) try: - # Python3 only: use subprocess.run() - args = ["ncdump"] + flags + [netcdf_filename] + exe_path = env_bin_path("ncdump") + args = [exe_path] + flags + [netcdf_filename] cdl = subprocess.check_output(args, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: print(exc.output) @@ -598,16 +603,6 @@ def _recordWarningMatches(self, expected_regexp=""): expr = re.compile(expected_regexp) matches.extend(message for message in messages if expr.search(message)) - @contextlib.contextmanager - def assertWarnsRegexp(self, expected_regexp=""): - # Check that a warning is raised matching a given expression. - with self._recordWarningMatches(expected_regexp) as matches: - yield - - msg = "Warning matching '{}' not raised." 
- msg = msg.format(expected_regexp) - self.assertTrue(matches, msg) - @contextlib.contextmanager def assertLogs(self, logger=None, level=None, msg_regex=None): """ @@ -654,85 +649,14 @@ def assertNoWarningsRegexp(self, expected_regexp=""): msg = msg.format(expected_regexp, matches) self.assertFalse(matches, msg) - def _assertMaskedArray(self, assertion, a, b, strict, **kwargs): - # Define helper function to extract unmasked values as a 1d - # array. - def unmasked_data_as_1d_array(array): - array = ma.asarray(array) - if array.ndim == 0: - if array.mask: - data = np.array([]) - else: - data = np.array([array.data]) - else: - data = array.data[~ma.getmaskarray(array)] - return data - - # Compare masks. This will also check that the array shapes - # match, which is not tested when comparing unmasked values if - # strict is False. - a_mask, b_mask = ma.getmaskarray(a), ma.getmaskarray(b) - np.testing.assert_array_equal(a_mask, b_mask) - - if strict: - assertion(a.data, b.data, **kwargs) - else: - assertion( - unmasked_data_as_1d_array(a), - unmasked_data_as_1d_array(b), - **kwargs, - ) - - def assertMaskedArrayEqual(self, a, b, strict=False): - """ - Check that masked arrays are equal. This requires the - unmasked values and masks to be identical. - - Args: - - * a, b (array-like): - Two arrays to compare. - - Kwargs: - - * strict (bool): - If True, perform a complete mask and data array equality check. - If False (default), the data array equality considers only unmasked - elements. - - """ - self._assertMaskedArray(np.testing.assert_array_equal, a, b, strict) + assertMaskedArrayEqual = staticmethod(assert_masked_array_equal) def assertArrayAlmostEqual(self, a, b, decimal=6): np.testing.assert_array_almost_equal(a, b, decimal=decimal) - def assertMaskedArrayAlmostEqual(self, a, b, decimal=6, strict=False): - """ - Check that masked arrays are almost equal. This requires the - masks to be identical, and the unmasked values to be almost - equal. - - Args: - - * a, b (array-like): - Two arrays to compare. - - Kwargs: - - * strict (bool): - If True, perform a complete mask and data array equality check. - If False (default), the data array equality considers only unmasked - elements. - - * decimal (int): - Equality tolerance level for - :meth:`numpy.testing.assert_array_almost_equal`, with the meaning - 'abs(desired-actual) < 0.5 * 10**(-decimal)' - - """ - self._assertMaskedArray( - np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal - ) + assertMaskedArrayAlmostEqual = staticmethod( + assert_masked_array_almost_equal + ) def assertArrayAllClose(self, a, b, rtol=1.0e-7, atol=1.0e-8, **kwargs): """ @@ -872,137 +796,10 @@ def check_graphic(self): output directory, and the imagerepo.json file being updated. """ - from PIL import Image - import imagehash - - dev_mode = os.environ.get("IRIS_TEST_CREATE_MISSING") - unique_id = self._unique_id() - repo_fname = os.path.join(_RESULT_PATH, "imagerepo.json") - with open(repo_fname, "rb") as fi: - repo: Dict[str, List[str]] = json.load( - codecs.getreader("utf-8")(fi) - ) - - try: - #: The path where the images generated by the tests should go. - image_output_directory = os.path.join( - os.path.dirname(__file__), "result_image_comparison" - ) - if not os.access(image_output_directory, os.W_OK): - if not os.access(os.getcwd(), os.W_OK): - raise IOError( - "Write access to a local disk is required " - "to run image tests. Run the tests from a " - "current working directory you have write " - "access to to avoid this issue." 
- ) - else: - image_output_directory = os.path.join( - os.getcwd(), "iris_image_test_output" - ) - result_fname = os.path.join( - image_output_directory, "result-" + unique_id + ".png" - ) - - if not os.path.isdir(image_output_directory): - # Handle race-condition where the directories are - # created sometime between the check above and the - # creation attempt below. - try: - os.makedirs(image_output_directory) - except OSError as err: - # Don't care about "File exists" - if err.errno != 17: - raise - - def _create_missing(): - fname = "{}.png".format(phash) - base_uri = ( - "https://scitools.github.io/test-iris-imagehash/" - "images/v4/{}" - ) - uri = base_uri.format(fname) - hash_fname = os.path.join(image_output_directory, fname) - uris = repo.setdefault(unique_id, []) - uris.append(uri) - print("Creating image file: {}".format(hash_fname)) - figure.savefig(hash_fname) - msg = "Creating imagerepo entry: {} -> {}" - print(msg.format(unique_id, uri)) - lock = filelock.FileLock( - os.path.join(_RESULT_PATH, "imagerepo.lock") - ) - # The imagerepo.json file is a critical resource, so ensure - # thread safe read/write behaviour via platform independent - # file locking. - with lock.acquire(timeout=600): - with open(repo_fname, "wb") as fo: - json.dump( - repo, - codecs.getwriter("utf-8")(fo), - indent=4, - sort_keys=True, - ) - - # Calculate the test result perceptual image hash. - buffer = io.BytesIO() - figure = plt.gcf() - figure.savefig(buffer, format="png") - buffer.seek(0) - phash = imagehash.phash(Image.open(buffer), hash_size=_HASH_SIZE) - - if unique_id not in repo: - # The unique id might not be fully qualified, e.g. - # expects iris.tests.test_quickplot.TestLabels.test_contour.0, - # but got test_quickplot.TestLabels.test_contour.0 - # if we find single partial match from end of the key - # then use that, else fall back to the unknown id state. - matches = [key for key in repo if key.endswith(unique_id)] - if len(matches) == 1: - unique_id = matches[0] - - if unique_id in repo: - uris = repo[unique_id] - # Extract the hex basename strings from the uris. - hexes = [ - os.path.splitext(os.path.basename(uri))[0] for uri in uris - ] - # Create the expected perceptual image hashes from the uris. - to_hash = imagehash.hex_to_hash - expected = [to_hash(uri_hex) for uri_hex in hexes] - - # Calculate hamming distance vector for the result hash. - distances = [e - phash for e in expected] - - if np.all([hd > _HAMMING_DISTANCE for hd in distances]): - if dev_mode: - _create_missing() - else: - figure.savefig(result_fname) - msg = ( - "Bad phash {} with hamming distance {} " - "for test {}." - ) - msg = msg.format(phash, distances, unique_id) - if _DISPLAY_FIGURES: - emsg = "Image comparison would have failed: {}" - print(emsg.format(msg)) - else: - emsg = "Image comparison failed: {}" - raise AssertionError(emsg.format(msg)) - else: - if dev_mode: - _create_missing() - else: - figure.savefig(result_fname) - emsg = "Missing image test result: {}." - raise AssertionError(emsg.format(unique_id)) - - if _DISPLAY_FIGURES: - plt.show() - - finally: - plt.close() + graphics.check_graphic( + self._unique_id(), + _RESULT_PATH, + ) def _remove_testcase_patches(self): """Helper to remove per-testcase patches installed by :meth:`patch`.""" @@ -1214,37 +1011,15 @@ class IrisTest(IrisTest_nometa, metaclass=_TestTimingsMetaclass): pass +get_data_path = IrisTest.get_data_path get_result_path = IrisTest.get_result_path -class GraphicsTestMixin: - - # nose directive: dispatch tests concurrently. 
-    _multiprocess_can_split_ = True
-
-    def setUp(self):
-        # Acquire threading non re-entrant blocking lock to ensure
-        # thread-safe plotting.
-        _lock.acquire()
-        # Make sure we have no unclosed plots from previous tests before
-        # generating this one.
-        if MPL_AVAILABLE:
-            plt.close("all")
-
-    def tearDown(self):
-        # If a plotting test bombs out it can leave the current figure
-        # in an odd state, so we make sure it's been disposed of.
-        if MPL_AVAILABLE:
-            plt.close("all")
-        # Release the non re-entrant blocking lock.
-        _lock.release()
-
-
-class GraphicsTest(GraphicsTestMixin, IrisTest):
+class GraphicsTest(graphics.GraphicsTestMixin, IrisTest):
     pass


-class GraphicsTest_nometa(GraphicsTestMixin, IrisTest_nometa):
+class GraphicsTest_nometa(graphics.GraphicsTestMixin, IrisTest_nometa):
     # Graphicstest without the metaclass providing test timings.
     pass

@@ -1290,23 +1065,7 @@ class MyGeoTiffTests(test.IrisTest):
     return skip(fn)


-def skip_plot(fn):
-    """
-    Decorator to choose whether to run tests, based on the availability of the
-    matplotlib library.
-
-    Example usage:
-        @skip_plot
-        class MyPlotTests(test.GraphicsTest):
-            ...
-
-    """
-    skip = unittest.skipIf(
-        condition=not MPL_AVAILABLE,
-        reason="Graphics tests require the matplotlib library.",
-    )
-
-    return skip(fn)
+skip_plot = graphics.skip_plot


 skip_sample_data = unittest.skipIf(
@@ -1352,3 +1111,30 @@ def wrapped(self, *args, **kwargs):
         return result

     return wrapped
+
+
+def env_bin_path(exe_name: str = None):
+    """
+    Return a Path object for (an executable in) the environment bin directory.
+
+    Parameters
+    ----------
+    exe_name : str
+        If set, the name of an executable to append to the path.
+
+    Returns
+    -------
+    exe_path : Path
+        A path to the bin directory, or an executable file within it.
+
+    Notes
+    -----
+    For use in tests that spawn commands expected to call executables within
+    the Python environment, since many IDEs (Eclipse, PyCharm) don't
+    automatically include this location in $PATH (as opposed to $PYTHONPATH).
+    """
+    exe_path = Path(os.__file__)
+    exe_path = (exe_path / "../../../bin").resolve()
+    if exe_name is not None:
+        exe_path = exe_path / exe_name
+    return exe_path
diff --git a/lib/iris/tests/graphics/README.md b/lib/iris/tests/graphics/README.md
new file mode 100755
index 0000000000..b26f1720e8
--- /dev/null
+++ b/lib/iris/tests/graphics/README.md
@@ -0,0 +1,51 @@
+# Graphics Tests
+
+Iris may be used to create various forms of graphical output; to ensure
+the output is consistent, there are automated tests to check against
+known acceptable graphical output.
+
+At present graphical tests are used in the following areas of Iris:
+
+* Module `iris.tests.test_plot`
+* Module `iris.tests.test_quickplot`
+* Gallery plots contained in `docs/gallery_tests`.
+
+
+## Challenges
+
+Iris relies on many dependencies; one that matters here is `matplotlib`.
+When `matplotlib` or one of its own dependencies is updated, the rendered
+graphical output may change. So even with no change to Iris itself, an
+updated dependency can cause automated tests that compare graphical output
+against a known acceptable output to fail. Such a failure may not even be
+visually perceptible, since it can amount to a simple pixel shift.
+
+
+## Testing Strategy
+
+The `iris.tests.IrisTest_nometa.check_graphic` test routine calls out to
+`iris.tests.graphics.check_graphic`, which tests against the **acceptable**
+result.
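+For illustration, the heart of this check is roughly the sketch below.
+The file names are hypothetical; the real logic, described in the rest of
+this section, lives in `graphics.check_graphic`:
+
+```python
+# Sketch only: compare a freshly rendered image against a baseline.
+from PIL import Image
+import imagehash
+
+HASH_SIZE = 16    # bits per axis of the perceptual hash
+MAX_DISTANCE = 2  # maximum acceptable hamming distance
+
+expected = imagehash.phash(Image.open("baseline.png"), hash_size=HASH_SIZE)
+actual = imagehash.phash(Image.open("result.png"), hash_size=HASH_SIZE)
+
+# Subtracting two hashes gives their hamming distance, i.e. the number of
+# differing bits, so minor rendering changes still pass.
+if expected - actual > MAX_DISTANCE:
+    raise AssertionError(f"hamming distance {expected - actual} too large")
+```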
+The comparison relies on an image **hash** technique, which keeps the
+check robust against minor variations caused by underlying library
+updates.
+
+This consists of:
+
+* The `graphics.check_graphic` function uses a perceptual
+  **image hash** of the outputs (see https://github.com/JohannesBuchner/imagehash)
+  as the basis for checking test results.
+
+* The hashes of known **acceptable** results for each test are stored in a
+  lookup dictionary, saved to the repo file
+  `lib/iris/tests/results/imagerepo.json`.
+
+* An actual baseline image for each hash value is stored in the test data
+  repository.
+
+* The baseline images allow human-eye assessment of whether a new output is
+  judged to be close enough to the older ones, or not.
+
+* The utility script `iris/tests/idiff.py` automates checking, enabling the
+  developer to easily compare the proposed new **acceptable** result image
+  against the existing accepted baseline image, for each failing test.
\ No newline at end of file
diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py
new file mode 100755
index 0000000000..a083de3934
--- /dev/null
+++ b/lib/iris/tests/graphics/__init__.py
@@ -0,0 +1,288 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+# !/usr/bin/env python
+"""
+Contains Iris graphic testing utilities.
+
+By default, this module sets the matplotlib backend to "agg". But when
+this module is imported it checks ``sys.argv`` for the flag "-d". If
+found, it is removed from ``sys.argv`` and the matplotlib backend is
+switched to "tkagg" to allow the interactive visual inspection of
+graphical test results.
+"""
+
+import codecs
+import io
+import json
+import os
+from pathlib import Path
+import sys
+import threading
+from typing import Callable, Dict, Union
+import unittest
+
+import filelock
+
+# Test for availability of matplotlib.
+# (And remove matplotlib as an iris.tests dependency.)
+try:
+    import matplotlib
+
+    # Override any user settings e.g. from matplotlibrc file.
+    matplotlib.rcdefaults()
+    # Set backend *after* rcdefaults, as we don't want that overridden (#3846).
+    matplotlib.use("agg")
+    # Standardise the figure size across matplotlib versions.
+    # This permits matplotlib png image comparison.
+    matplotlib.rcParams["figure.figsize"] = [8.0, 6.0]
+    import matplotlib.pyplot as plt
+except ImportError:
+    MPL_AVAILABLE = False
+else:
+    MPL_AVAILABLE = True
+
+# Whether to display matplotlib output to the screen.
+_DISPLAY_FIGURES = False
+
+if MPL_AVAILABLE and "-d" in sys.argv:
+    sys.argv.remove("-d")
+    plt.switch_backend("tkagg")
+    _DISPLAY_FIGURES = True
+
+# Threading non re-entrant blocking lock to ensure thread-safe plotting in the
+# GraphicsTestMixin.
+_lock = threading.Lock()
+
+#: Default perceptual hash size.
+HASH_SIZE = 16
+#: Default maximum perceptual hash hamming distance.
+HAMMING_DISTANCE = 2
+# Prefix for image test results (that aren't yet verified as good to add to
+# reference images).
+RESULT_PREFIX = "result-"
+# Name of the imagerepo json and associated file lock.
+IMAGE_REPO_DIR = Path(__file__).parents[1] / "results"
+IMAGE_REPO_PATH = IMAGE_REPO_DIR / "imagerepo.json"
+IMAGE_REPO_LOCK_PATH = IMAGE_REPO_DIR / "imagerepo.lock"
+
+
+__all__ = [
+    "GraphicsTestMixin",
+    "MPL_AVAILABLE",
+    "RESULT_PREFIX",
+    "check_graphic",
+    "fully_qualify",
+    "generate_repo_from_baselines",
+    "get_phash",
+    "read_repo_json",
+    "repos_equal",
+    "skip_plot",
+    "write_repo_json",
+]
+
+
+def _output_dir() -> Path:
+    test_output_dir = Path(__file__).parents[1] / Path(
+        "result_image_comparison"
+    )
+
+    if not os.access(test_output_dir, os.W_OK):
+        if not os.access(Path("."), os.W_OK):
+            raise IOError(
+                "Write access to a local disk is required "
+                "to run image tests. Run the tests from a "
+                "current working directory you have write "
+                "access to, to avoid this issue."
+            )
+        else:
+            test_output_dir = Path(".") / "iris_image_test_output"
+
+    return test_output_dir
+
+
+def read_repo_json() -> Dict[str, str]:
+    with open(IMAGE_REPO_PATH, "rb") as fi:
+        repo: Dict[str, str] = json.load(codecs.getreader("utf-8")(fi))
+    return repo
+
+
+def write_repo_json(data: Dict[str, str]) -> None:
+    string_data = {}
+    for key, val in data.items():
+        string_data[key] = str(val)
+    with open(IMAGE_REPO_PATH, "wb") as fo:
+        json.dump(
+            string_data,
+            codecs.getwriter("utf-8")(fo),
+            indent=4,
+            sort_keys=True,
+        )
+
+
+def repos_equal(repo1: Dict[str, str], repo2: Dict[str, str]) -> bool:
+    if sorted(repo1.keys()) != sorted(repo2.keys()):
+        return False
+    for key, val in repo1.items():
+        if str(val) != str(repo2[key]):
+            return False
+    return True
+
+
+def get_phash(input: Path) -> str:
+    from PIL import Image
+    import imagehash
+
+    return imagehash.phash(Image.open(input), hash_size=HASH_SIZE)
+
+
+def generate_repo_from_baselines(baseline_image_dir: Path) -> Dict[str, str]:
+    repo = {}
+    for path in baseline_image_dir.iterdir():
+        phash = get_phash(path)
+        repo[path.stem] = phash
+    return repo
+
+
+def fully_qualify(test_id: str, repo: Dict[str, str]) -> str:
+    # If the test_id isn't in the repo as it stands, look for it.
+    if test_id not in repo:
+        test_id_candidates = [x for x in repo.keys() if x.endswith(test_id)]
+        if len(test_id_candidates) == 1:
+            (test_id,) = test_id_candidates
+    return test_id
+
+
+def check_graphic(test_id: str, results_dir: Union[str, Path]) -> None:
+    """
+    Check that the hash of the current matplotlib figure matches the expected
+    image hash for the current graphic test.
+
+    To create missing image test results, set the IRIS_TEST_CREATE_MISSING
+    environment variable before running the tests. This will result in new
+    and appropriately named ".png" image files being generated in the image
+    output directory, and the imagerepo.json file being updated.
+
+    """
+    from imagehash import hex_to_hash
+
+    dev_mode = os.environ.get("IRIS_TEST_CREATE_MISSING")
+
+    #: The path where the images generated by the tests should go.
+ test_output_dir = _output_dir() + test_output_dir.mkdir(exist_ok=True) + + # The path where the image matching this test should be saved if necessary + result_path = test_output_dir / f"{RESULT_PREFIX}{test_id}.png" + + results_dir = Path(results_dir) + repo = read_repo_json() + + # Check if test_id is fully qualified, if it's not then try to work + # out what it should be + test_id = fully_qualify(test_id, repo) + + try: + + def _create_missing(phash: str) -> None: + + output_path = test_output_dir / (test_id + ".png") + + print(f"Creating image file: {output_path}") + figure.savefig(output_path) + + msg = "Creating imagerepo entry: {} -> {}" + print(msg.format(test_id, phash)) + # The imagerepo.json file is a critical resource, so ensure + # thread safe read/write behaviour via platform independent + # file locking. + lock = filelock.FileLock(IMAGE_REPO_LOCK_PATH) + with lock.acquire(timeout=600): + # Read the file again in case it changed, then edit before + # releasing lock + repo = read_repo_json() + repo[test_id] = phash + write_repo_json(repo) + + # Calculate the test result perceptual image hash. + buffer = io.BytesIO() + figure = plt.gcf() + figure.savefig(buffer, format="png") + buffer.seek(0) + phash = get_phash(buffer) + + if test_id in repo: + + expected = hex_to_hash(repo[test_id]) + + # Calculate hamming distance vector for the result hash. + distance = expected - phash + + if distance > HAMMING_DISTANCE: + if dev_mode: + _create_missing(phash) + else: + figure.savefig(result_path) + msg = ( + "Bad phash {} with hamming distance {} " "for test {}." + ) + msg = msg.format(phash, distance, test_id) + if _DISPLAY_FIGURES: + emsg = "Image comparison would have failed: {}" + print(emsg.format(msg)) + else: + emsg = "Image comparison failed: {}" + raise AssertionError(emsg.format(msg)) + else: + if dev_mode: + _create_missing(phash) + else: + figure.savefig(result_path) + emsg = "Missing image test result: {}." + raise AssertionError(emsg.format(test_id)) + + if _DISPLAY_FIGURES: + plt.show() + + finally: + plt.close() + + +class GraphicsTestMixin: + def setUp(self) -> None: + # Acquire threading non re-entrant blocking lock to ensure + # thread-safe plotting. + _lock.acquire() + # Make sure we have no unclosed plots from previous tests before + # generating this one. + if MPL_AVAILABLE: + plt.close("all") + + def tearDown(self) -> None: + # If a plotting test bombs out it can leave the current figure + # in an odd state, so we make sure it's been disposed of. + if MPL_AVAILABLE: + plt.close("all") + # Release the non re-entrant blocking lock. + _lock.release() + + +def skip_plot(fn: Callable) -> Callable: + """ + Decorator to choose whether to run tests, based on the availability of the + matplotlib library. + + Example usage: + @skip_plot + class MyPlotTests(test.GraphicsTest): + ... + + """ + skip = unittest.skipIf( + condition=not MPL_AVAILABLE, + reason="Graphics tests require the matplotlib library.", + ) + + return skip(fn) diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py new file mode 100755 index 0000000000..a355f2cf82 --- /dev/null +++ b/lib/iris/tests/graphics/idiff.py @@ -0,0 +1,208 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +# !/usr/bin/env python +""" +Provides "diff-like" comparison of images. + +Currently relies on matplotlib for image processing so limited to PNG format. 
+ +""" + +import argparse +from pathlib import Path +import re +import sys +import warnings + +# Force iris.tests to use the ```tkagg``` backend by using the '-d' +# command-line argument as idiff is an interactive tool that requires a +# gui interface. +sys.argv.append("-d") +from PIL import Image # noqa +import matplotlib.image as mimg # noqa +import matplotlib.pyplot as plt # noqa +import matplotlib.testing.compare as mcompare # noqa +from matplotlib.testing.exceptions import ImageComparisonFailure # noqa +import matplotlib.widgets as mwidget # noqa + +import iris.tests # noqa +import iris.tests.graphics as graphics # noqa + +# Allows restoration of test id from result image name +_RESULT_NAME_PATTERN = re.compile(graphics.RESULT_PREFIX + r"(.*).png") + + +def extract_test_key(result_image_name): + """ + Extracts the name of the test which a result image refers to + """ + name_match = _RESULT_NAME_PATTERN.match(str(result_image_name)) + if name_match: + test_key = name_match.group(1) + else: + emsg = f"Incorrectly named image in result dir: {result_image_name}" + raise ValueError(emsg) + return test_key + + +_POSTFIX_DIFF = "-failed-diff.png" + + +def diff_viewer( + test_id, + status, + phash, + expected_path, + result_path, + diff_fname, +): + fig = plt.figure(figsize=(14, 12)) + plt.suptitle(expected_path.name) + ax = plt.subplot(221) + ax.imshow(mimg.imread(expected_path)) + ax = plt.subplot(222, sharex=ax, sharey=ax) + ax.imshow(mimg.imread(result_path)) + ax = plt.subplot(223, sharex=ax, sharey=ax) + ax.imshow(mimg.imread(diff_fname)) + + result_dir = result_path.parent + + repo = graphics.read_repo_json() + + def accept(event): + if test_id not in repo: + repo[test_id] = phash + graphics.write_repo_json(repo) + out_file = result_dir / (test_id + ".png") + result_path.rename(out_file) + msg = f"ACCEPTED: {result_path.name} -> {out_file.name}" + print(msg) + else: + msg = f"DUPLICATE: {result_path.name} -> {expected_path.name} (ignored)" + print(msg) + result_path.unlink() + diff_fname.unlink() + plt.close() + + def reject(event): + if test_id not in repo: + print(f"REJECTED: {result_path.name}") + else: + msg = f"DUPLICATE: {result_path.name} -> {expected_path.name} (ignored)" + print(msg) + result_path.unlink() + diff_fname.unlink() + plt.close() + + def skip(event): + # Let's keep both the result and the diff files. + print(f"SKIPPED: {result_path.name}") + plt.close() + + ax_accept = plt.axes([0.59, 0.05, 0.1, 0.075]) + ax_reject = plt.axes([0.7, 0.05, 0.1, 0.075]) + ax_skip = plt.axes([0.81, 0.05, 0.1, 0.075]) + baccept = mwidget.Button(ax_accept, "Accept") + baccept.on_clicked(accept) + breject = mwidget.Button(ax_reject, "Reject") + breject.on_clicked(reject) + bskip = mwidget.Button(ax_skip, "Skip") + bskip.on_clicked(skip) + plt.text(0.59, 0.15, status, transform=fig.transFigure) + plt.show() + + +def step_over_diffs(result_dir, display=True): + processed = False + + if display: + msg = "\nComparing the expected image with the test result image." + print(msg) + + # Remove old image diff results. + for fname in result_dir.glob(f"*{_POSTFIX_DIFF}"): + fname.unlink() + + reference_image_dir = Path(iris.tests.get_data_path("images")) + repo = graphics.read_repo_json() + + # Filter out all non-test result image files. + results = [] + for fname in sorted(result_dir.glob(f"{graphics.RESULT_PREFIX}*.png")): + # We only care about PNG images. + try: + im = Image.open(fname) + if im.format != "PNG": + # Ignore - it's not a png image. 
+ continue + except IOError: + # Ignore - it's not an image. + continue + results.append(fname) + + count = len(results) + + for count_index, result_path in enumerate(results): + test_key = extract_test_key(result_path.name) + test_key = graphics.fully_qualify(test_key, repo) + reference_image_path = reference_image_dir / (test_key + ".png") + + try: + # Calculate the test result perceptual image hash. + phash = graphics.get_phash(result_path) + distance = graphics.get_phash(reference_image_path) - phash + except FileNotFoundError: + wmsg = "Ignoring unregistered test result {!r}." + warnings.warn(wmsg.format(test_key)) + continue + + processed = True + + try: + # Creates the diff file when the images aren't identical + mcompare.compare_images(reference_image_path, result_path, tol=0) + except Exception as e: + if isinstance(e, ValueError) or isinstance( + e, ImageComparisonFailure + ): + print(f"Could not compare {result_path}: {e}") + continue + else: + # Propagate the exception, keeping the stack trace + raise + diff_path = result_dir / Path(f"{result_path.stem}{_POSTFIX_DIFF}") + args = phash, reference_image_path, result_path, diff_path + if display: + status = f"Image {count_index + 1} of {count}: hamming distance = {distance}" + prefix = test_key, status + yield prefix + args + else: + yield args + if display and not processed: + print("\nThere are no iris test result images to process.\n") + + +if __name__ == "__main__": + default = Path(iris.tests.__file__).parent / Path( + "result_image_comparison" + ) + description = "Iris graphic test difference tool." + formatter_class = argparse.RawTextHelpFormatter + parser = argparse.ArgumentParser( + description=description, formatter_class=formatter_class + ) + help = "path to iris tests result image directory (default: %(default)s)" + parser.add_argument("--resultdir", "-r", default=default, help=help) + help = 'force "iris.tests" to use the tkagg backend (default: %(default)s)' + parser.add_argument("-d", action="store_true", default=True, help=help) + args = parser.parse_args() + result_dir = Path(args.resultdir) + if not result_dir.is_dir(): + emsg = f"Invalid results directory: {result_dir}" + raise ValueError(emsg) + + for args in step_over_diffs(result_dir): + diff_viewer(*args) diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py new file mode 100755 index 0000000000..02ddaad2cb --- /dev/null +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -0,0 +1,74 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+# !/usr/bin/env python +""" +Updates imagerepo.json based on the baseline images + +""" + +import argparse +from pathlib import Path + +from imagehash import hex_to_hash + +import iris.tests +import iris.tests.graphics as graphics + + +def update_json(baseline_image_dir: Path, dry_run: bool = False): + repo = graphics.read_repo_json() + suggested_repo = graphics.generate_repo_from_baselines(baseline_image_dir) + + if graphics.repos_equal(repo, suggested_repo): + msg = ( + f"No change in contents of {graphics.IMAGE_REPO_PATH} based on " + f"{baseline_image_dir}" + ) + print(msg) + else: + for key in sorted(set(repo.keys()) | set(suggested_repo.keys())): + old_val = repo.get(key) + new_val = suggested_repo.get(key) + if old_val is None: + repo[key] = suggested_repo[key] + print(key) + print(f"\t{old_val} -> {new_val}") + elif new_val is None: + del repo[key] + print(key) + print(f"\t{old_val} -> {new_val}") + else: + difference = hex_to_hash(str(old_val)) - hex_to_hash( + str(new_val) + ) + if difference > 0: + print(key) + print(f"\t{old_val} -> {new_val} ({difference})") + repo[key] = suggested_repo[key] + if not dry_run: + graphics.write_repo_json(repo) + + +if __name__ == "__main__": + default_baseline_image_dir = Path( + iris.tests.IrisTest.get_data_path("images") + ) + description = "Update imagerepo.json based on contents of the baseline image directory" + formatter_class = argparse.RawTextHelpFormatter + parser = argparse.ArgumentParser( + description=description, formatter_class=formatter_class + ) + help = "path to iris tests result image directory (default: %(default)s)" + parser.add_argument( + "--image-dir", default=default_baseline_image_dir, help=help + ) + help = "dry run (don't actually update imagerepo.json)" + parser.add_argument("--dry-run", action="store_true", help=help) + args = parser.parse_args() + update_json( + args.image_dir, + args.dry_run, + ) diff --git a/lib/iris/tests/idiff.py b/lib/iris/tests/idiff.py deleted file mode 100755 index 9770ca116f..0000000000 --- a/lib/iris/tests/idiff.py +++ /dev/null @@ -1,314 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -# !/usr/bin/env python -""" -Provides "diff-like" comparison of images. - -Currently relies on matplotlib for image processing so limited to PNG format. - -""" - -import argparse -import codecs -import contextlib -from glob import glob -import json -import os.path -import shutil -import sys -import warnings - -# Force iris.tests to use the ```tkagg``` backend by using the '-d' -# command-line argument as idiff is an interactive tool that requires a -# gui interface. 
-sys.argv.append("-d") -from PIL import Image # noqa -import filelock # noqa -import imagehash # noqa -import matplotlib.image as mimg # noqa -import matplotlib.pyplot as plt # noqa -import matplotlib.testing.compare as mcompare # noqa -from matplotlib.testing.exceptions import ImageComparisonFailure # noqa -import matplotlib.widgets as mwidget # noqa -import numpy as np # noqa -import requests # noqa - -import iris.tests # noqa -import iris.util as iutil # noqa - -_POSTFIX_DIFF = "-failed-diff.png" -_POSTFIX_JSON = os.path.join("results", "imagerepo.json") -_POSTFIX_LOCK = os.path.join("results", "imagerepo.lock") - - -@contextlib.contextmanager -def temp_png(suffix=""): - if suffix: - suffix = "-{}".format(suffix) - fname = iutil.create_temp_filename(suffix + ".png") - try: - yield fname - finally: - os.remove(fname) - - -def diff_viewer( - repo, - key, - repo_fname, - phash, - status, - expected_fname, - result_fname, - diff_fname, -): - fig = plt.figure(figsize=(14, 12)) - plt.suptitle(os.path.basename(expected_fname)) - ax = plt.subplot(221) - ax.imshow(mimg.imread(expected_fname)) - ax = plt.subplot(222, sharex=ax, sharey=ax) - ax.imshow(mimg.imread(result_fname)) - ax = plt.subplot(223, sharex=ax, sharey=ax) - ax.imshow(mimg.imread(diff_fname)) - - result_dir = os.path.dirname(result_fname) - fname = "{}.png".format(phash) - base_uri = "https://scitools.github.io/test-iris-imagehash/images/v4/{}" - uri = base_uri.format(fname) - phash_fname = os.path.join(result_dir, fname) - - def accept(event): - if uri not in repo[key]: - # Ensure to maintain strict time order where the first uri - # associated with the repo key is the oldest, and the last - # uri is the youngest - repo[key].append(uri) - # Update the image repo. - with open(repo_fname, "wb") as fo: - json.dump( - repo, - codecs.getwriter("utf-8")(fo), - indent=4, - sort_keys=True, - ) - os.rename(result_fname, phash_fname) - msg = "ACCEPTED: {} -> {}" - print( - msg.format( - os.path.basename(result_fname), - os.path.basename(phash_fname), - ) - ) - else: - msg = "DUPLICATE: {} -> {} (ignored)" - print( - msg.format( - os.path.basename(result_fname), - os.path.basename(phash_fname), - ) - ) - os.remove(result_fname) - os.remove(diff_fname) - plt.close() - - def reject(event): - if uri not in repo[key]: - print("REJECTED: {}".format(os.path.basename(result_fname))) - else: - msg = "DUPLICATE: {} -> {} (ignored)" - print( - msg.format( - os.path.basename(result_fname), - os.path.basename(phash_fname), - ) - ) - os.remove(result_fname) - os.remove(diff_fname) - plt.close() - - def skip(event): - # Let's keep both the result and the diff files. - print("SKIPPED: {}".format(os.path.basename(result_fname))) - plt.close() - - ax_accept = plt.axes([0.59, 0.05, 0.1, 0.075]) - ax_reject = plt.axes([0.7, 0.05, 0.1, 0.075]) - ax_skip = plt.axes([0.81, 0.05, 0.1, 0.075]) - baccept = mwidget.Button(ax_accept, "Accept") - baccept.on_clicked(accept) - breject = mwidget.Button(ax_reject, "Reject") - breject.on_clicked(reject) - bskip = mwidget.Button(ax_skip, "Skip") - bskip.on_clicked(skip) - plt.text(0.59, 0.15, status, transform=fig.transFigure) - plt.show() - - -def _calculate_hit(uris, phash, action): - # Extract the hex basename strings from the uris. - hexes = [os.path.splitext(os.path.basename(uri))[0] for uri in uris] - # Create the expected perceptual image hashes from the uris. - to_hash = imagehash.hex_to_hash - expected = [to_hash(uri_hex) for uri_hex in hexes] - # Calculate the hamming distance vector for the result hash. 
- distances = [e - phash for e in expected] - - if action == "first": - index = 0 - elif action == "last": - index = -1 - elif action == "similar": - index = np.argmin(distances) - elif action == "different": - index = np.argmax(distances) - else: - emsg = "Unknown action: {!r}" - raise ValueError(emsg.format(action)) - - return index, distances[index] - - -def step_over_diffs(result_dir, action, display=True): - processed = False - dname = os.path.dirname(iris.tests.__file__) - lock = filelock.FileLock(os.path.join(dname, _POSTFIX_LOCK)) - if action in ["first", "last"]: - kind = action - elif action in ["similar", "different"]: - kind = "most {}".format(action) - else: - emsg = "Unknown action: {!r}" - raise ValueError(emsg.format(action)) - if display: - msg = ( - "\nComparing the {!r} expected image with " - "the test result image." - ) - print(msg.format(kind)) - - # Remove old image diff results. - target = os.path.join(result_dir, "*{}".format(_POSTFIX_DIFF)) - for fname in glob(target): - os.remove(fname) - - with lock.acquire(timeout=30): - # Load the imagerepo. - repo_fname = os.path.join(dname, _POSTFIX_JSON) - with open(repo_fname, "rb") as fi: - repo = json.load(codecs.getreader("utf-8")(fi)) - - # Filter out all non-test result image files. - target_glob = os.path.join(result_dir, "result-*.png") - results = [] - for fname in sorted(glob(target_glob)): - # We only care about PNG images. - try: - im = Image.open(fname) - if im.format != "PNG": - # Ignore - it's not a png image. - continue - except IOError: - # Ignore - it's not an image. - continue - results.append(fname) - - count = len(results) - - for count_index, result_fname in enumerate(results): - key = os.path.splitext( - "-".join(result_fname.split("result-")[1:]) - )[0] - try: - # Calculate the test result perceptual image hash. - phash = imagehash.phash( - Image.open(result_fname), hash_size=iris.tests._HASH_SIZE - ) - uris = repo[key] - hash_index, distance = _calculate_hit(uris, phash, action) - uri = uris[hash_index] - except KeyError: - wmsg = "Ignoring unregistered test result {!r}." - warnings.warn(wmsg.format(key)) - continue - with temp_png(key) as expected_fname: - processed = True - resource = requests.get(uri) - if resource.status_code == 200: - with open(expected_fname, "wb") as fo: - fo.write(resource.content) - else: - # Perhaps the uri has not been pushed into the repo yet, - # so check if a local "developer" copy is available ... - local_fname = os.path.join( - result_dir, os.path.basename(uri) - ) - if not os.path.isfile(local_fname): - emsg = "Bad URI {!r} for test {!r}." - raise ValueError(emsg.format(uri, key)) - else: - # The temporary expected filename has the test name - # baked into it, and is used in the diff plot title. - # So copy the local file to the exected file to - # maintain this helpfulness. 
- shutil.copy(local_fname, expected_fname) - try: - mcompare.compare_images( - expected_fname, result_fname, tol=0 - ) - except Exception as e: - if isinstance(e, ValueError) or isinstance( - e, ImageComparisonFailure - ): - print( - "Could not compare {}: {}".format(result_fname, e) - ) - continue - else: - # Propagate the exception, keeping the stack trace - raise - diff_fname = os.path.splitext(result_fname)[0] + _POSTFIX_DIFF - args = expected_fname, result_fname, diff_fname - if display: - msg = "Image {} of {}: hamming distance = {} " "[{!r}]" - status = msg.format(count_index + 1, count, distance, kind) - prefix = repo, key, repo_fname, phash, status - yield prefix + args - else: - yield args - if display and not processed: - print("\nThere are no iris test result images to process.\n") - - -if __name__ == "__main__": - default = os.path.join( - os.path.dirname(iris.tests.__file__), "result_image_comparison" - ) - description = "Iris graphic test difference tool." - formatter_class = argparse.RawTextHelpFormatter - parser = argparse.ArgumentParser( - description=description, formatter_class=formatter_class - ) - help = "path to iris tests result image directory (default: %(default)s)" - parser.add_argument("--resultdir", "-r", default=default, help=help) - help = 'force "iris.tests" to use the tkagg backend (default: %(default)s)' - parser.add_argument("-d", action="store_true", default=True, help=help) - help = """ -first - compare result image with first (oldest) expected image -last - compare result image with last (youngest) expected image -similar - compare result image with most similar expected image (default) -different - compare result image with most unsimilar expected image -""" - choices = ("first", "last", "similar", "different") - parser.add_argument( - "action", nargs="?", choices=choices, default="similar", help=help - ) - args = parser.parse_args() - result_dir = args.resultdir - if not os.path.isdir(result_dir): - emsg = "Invalid results directory: {}" - raise ValueError(emsg.format(result_dir)) - for args in step_over_diffs(result_dir, args.action): - diff_viewer(*args) diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index 4e3f453e0a..091ecd4378 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -33,7 +33,7 @@ def simple_1d_time_cubes(self, reftimes, coords_points): standard_name="air_temperature", units="K", ) - unit = cf_units.Unit(reftime, calendar="gregorian") + unit = cf_units.Unit(reftime, calendar="standard") coord = iris.coords.DimCoord( points=np.array(coord_points, dtype=np.float32), standard_name="time", @@ -68,7 +68,7 @@ def create_cube(self): ) height = iris.coords.AuxCoord([1.5], standard_name="height", units="m") t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" + "hours since 1970-01-01 00:00:00", calendar="standard" ) time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) @@ -113,7 +113,7 @@ def create_cube(self): [1.5], standard_name="height", units="m" ) t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" + "hours since 1970-01-01 00:00:00", calendar="standard" ) time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) @@ -156,7 +156,7 @@ def create_cube(self): [1.5], standard_name="height", units="m" ) t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", 
calendar="gregorian" + "hours since 1970-01-01 00:00:00", calendar="standard" ) time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) @@ -196,7 +196,7 @@ def setUp(self): # Time coord t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" + "hours since 1970-01-01 00:00:00", calendar="standard" ) t_coord = iris.coords.DimCoord( [0, 6], standard_name="time", units=t_unit diff --git a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py index 1ace02ea8a..742adc8c15 100644 --- a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py +++ b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py @@ -9,6 +9,8 @@ # importing anything else. import iris.tests as tests # isort:skip +import unittest + import cartopy.crs as ccrs from cf_units import Unit import numpy as np @@ -60,6 +62,9 @@ def test_nearest_sinusoidal(self): res[:, 0], (1, 73, 96), 299.99993826, 3.9223839688e-5 ) + @unittest.skip( + "Deprecated API and provenance of reference numbers unknown." + ) def test_nearest_gnomonic_uk_domain(self): crs = ccrs.Gnomonic(central_latitude=60.0) uk_grid = self.global_grid.intersection( diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py b/lib/iris/tests/integration/experimental/test_ugrid_save.py index eb2cb04f79..803ac71caa 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_save.py @@ -14,17 +14,15 @@ import glob from pathlib import Path import shutil -from subprocess import check_call import tempfile import iris from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD import iris.fileformats.netcdf -from iris.tests import IrisTest -from iris.tests.stock.netcdf import _add_standard_data +from iris.tests.stock.netcdf import _add_standard_data, ncgen_from_cdl -class TestBasicSave(IrisTest): +class TestBasicSave(tests.IrisTest): @classmethod def setUpClass(cls): cls.temp_dir = Path(tempfile.mkdtemp()) @@ -46,11 +44,11 @@ def tearDownClass(cls): def test_example_result_cdls(self): # Snapshot the result of saving the example cases. - for ex_name, filepath in self.example_names_paths.items(): + for ex_name, cdl_path in self.example_names_paths.items(): + # Create a test netcdf file. target_ncfile_path = str(self.temp_dir / f"{ex_name}.nc") - # Create a netcdf file from the test CDL. - check_call( - f"ncgen {filepath} -k4 -o {target_ncfile_path}", shell=True + ncgen_from_cdl( + cdl_str=None, cdl_path=cdl_path, nc_path=target_ncfile_path ) # Fill in blank data-variables. _add_standard_data(target_ncfile_path) @@ -64,18 +62,18 @@ def test_example_result_cdls(self): refdir_relpath = ( "integration/experimental/ugrid_save/TestBasicSave/" ) - reffile_name = str(Path(filepath).name).replace(".nc", ".cdl") + reffile_name = str(Path(cdl_path).name).replace(".nc", ".cdl") reffile_path = refdir_relpath + reffile_name self.assertCDL(resave_ncfile_path, reference_filename=reffile_path) def test_example_roundtrips(self): # Check that save-and-loadback leaves Iris data unchanged, # for data derived from each UGRID example CDL. - for ex_name, filepath in self.example_names_paths.items(): + for ex_name, cdl_path in self.example_names_paths.items(): + # Create a test netcdf file. target_ncfile_path = str(self.temp_dir / f"{ex_name}.nc") - # Create a netcdf file from the test CDL. 
- check_call( - f"ncgen {filepath} -k4 -o {target_ncfile_path}", shell=True + ncgen_from_cdl( + cdl_str=None, cdl_path=cdl_path, nc_path=target_ncfile_path ) # Fill in blank data-variables. _add_standard_data(target_ncfile_path) diff --git a/lib/iris/tests/integration/merge/__init__.py b/lib/iris/tests/integration/merge/__init__.py new file mode 100644 index 0000000000..9374976532 --- /dev/null +++ b/lib/iris/tests/integration/merge/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Integration tests for the :mod:`iris._merge` package.""" diff --git a/lib/iris/tests/integration/merge/test_merge.py b/lib/iris/tests/integration/merge/test_merge.py new file mode 100644 index 0000000000..f5f92a7a7d --- /dev/null +++ b/lib/iris/tests/integration/merge/test_merge.py @@ -0,0 +1,37 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Integration tests for merging cubes. + +""" + +# import iris tests first so that some things can be initialised +# before importing anything else. +import iris.tests as tests # isort:skip + +from iris.coords import DimCoord +from iris.cube import Cube, CubeList + + +class TestContiguous(tests.IrisTest): + def test_form_contiguous_dimcoord(self): + # Test that cube sliced up and remerged in the opposite order maintains + # contiguity. + cube1 = Cube([1, 2, 3], "air_temperature", units="K") + coord1 = DimCoord([3, 2, 1], long_name="spam") + coord1.guess_bounds() + cube1.add_dim_coord(coord1, 0) + cubes = CubeList(cube1.slices_over("spam")) + cube2 = cubes.merge_cube() + coord2 = cube2.coord("spam") + + self.assertTrue(coord2.is_contiguous()) + self.assertArrayEqual(coord2.points, [1, 2, 3]) + self.assertArrayEqual(coord2.bounds, coord1.bounds[::-1, ::-1]) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index 340f37dda7..d438c09bd5 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -4,7 +4,7 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test plot of time coord with non-gregorian calendar. +Test plot of time coord with non-standard calendar. """ @@ -18,10 +18,6 @@ from iris.coords import AuxCoord -if tests.NC_TIME_AXIS_AVAILABLE: - from nc_time_axis import CalendarDateTime - - # Run tests in no graphics mode if matplotlib is not available. if tests.MPL_AVAILABLE: import iris.plot as iplt @@ -48,9 +44,8 @@ def test_360_day_calendar(self): ) for atime in times ] - expected_ydata = np.array( - [CalendarDateTime(time, calendar) for time in times] - ) + + expected_ydata = times (line1,) = iplt.plot(time_coord) result_ydata = line1.get_ydata() self.assertArrayEqual(expected_ydata, result_ydata) diff --git a/lib/iris/tests/integration/test_Datums.py b/lib/iris/tests/integration/test_Datums.py new file mode 100755 index 0000000000..77b7f28249 --- /dev/null +++ b/lib/iris/tests/integration/test_Datums.py @@ -0,0 +1,53 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+"""Integration tests for :class:`iris.coord_systems` datum suppport.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +import cartopy.crs as ccrs +import numpy as np + +from iris.coord_systems import GeogCS, LambertConformal + + +class TestDatumTransformation(tests.IrisTest): + def setUp(self): + self.x_points = np.array([-1.5]) + self.y_points = np.array([50.5]) + + self.start_crs = ccrs.OSGB(False) + + def test_transform_points_datum(self): + + # Iris version + wgs84 = GeogCS.from_datum("WGS84") + iris_cs = LambertConformal( + central_lat=54, + central_lon=-4, + secant_latitudes=[52, 56], + ellipsoid=wgs84, + ) + iris_cs_as_cartopy = iris_cs.as_cartopy_crs() + + # Cartopy equivalent + cartopy_cs = ccrs.LambertConformal( + central_latitude=54, + central_longitude=-4, + standard_parallels=[52, 56], + globe=ccrs.Globe("WGS84"), + ) + + expected = cartopy_cs.transform_points( + self.start_crs, self.x_points, self.y_points + ) + + actual = iris_cs_as_cartopy.transform_points( + self.start_crs, self.x_points, self.y_points + ) + + self.assertArrayEqual(expected, actual) diff --git a/lib/iris/tests/integration/test_climatology.py b/lib/iris/tests/integration/test_climatology.py index ba1ccaf888..54d43858fb 100644 --- a/lib/iris/tests/integration/test_climatology.py +++ b/lib/iris/tests/integration/test_climatology.py @@ -13,14 +13,14 @@ from os.path import join as path_join from os.path import sep as os_sep import shutil -from subprocess import check_call import tempfile import iris from iris.tests import stock +from iris.tests.stock.netcdf import ncgen_from_cdl -class TestClimatology(iris.tests.IrisTest): +class TestClimatology(tests.IrisTest): reference_cdl_path = os_sep.join( [ dirname(tests.__file__), @@ -58,12 +58,13 @@ def setUpClass(cls): cls.temp_dir = tempfile.mkdtemp() cls.path_ref_cdl = path_join(cls.temp_dir, "standard.cdl") cls.path_ref_nc = path_join(cls.temp_dir, "standard.nc") - # Create reference CDL file. - with open(cls.path_ref_cdl, "w") as f_out: - f_out.write(cls._simple_cdl_string()) - # Create reference netCDF file from reference CDL. - command = "ncgen -o {} {}".format(cls.path_ref_nc, cls.path_ref_cdl) - check_call(command, shell=True) + # Create reference CDL and netcdf files (with ncgen). + ncgen_from_cdl( + cdl_str=cls._simple_cdl_string(), + cdl_path=cls.path_ref_cdl, + nc_path=cls.path_ref_nc, + ) + cls.path_temp_nc = path_join(cls.temp_dir, "tmp.nc") # Create reference cube. 
diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py index f7aaa1d05c..3feb637bf8 100644 --- a/lib/iris/tests/integration/test_netcdf.py +++ b/lib/iris/tests/integration/test_netcdf.py @@ -14,7 +14,6 @@ import os.path from os.path import join as path_join import shutil -from subprocess import check_call import tempfile from unittest import mock import warnings @@ -24,7 +23,8 @@ import numpy.ma as ma import iris -from iris.coords import CellMethod +import iris.coord_systems +from iris.coords import CellMethod, DimCoord from iris.cube import Cube, CubeList from iris.fileformats.netcdf import ( CF_CONVENTIONS_VERSION, @@ -32,6 +32,8 @@ UnknownCellMethodWarning, ) import iris.tests.stock as stock +from iris.tests.stock.netcdf import ncgen_from_cdl +import iris.tests.unit.fileformats.netcdf.test_load_cubes as tlc @tests.skip_data @@ -416,7 +418,7 @@ def setUp(self): levels.units = "centimeters" levels.positive = "down" levels.axis = "Z" - levels[:] = np.linspace(0, 10 ** 5, 3) + levels[:] = np.linspace(0, 10**5, 3) volcello.id = "volcello" volcello.out_name = "volcello" @@ -484,6 +486,12 @@ def test_unknown_method(self): @tests.skip_data class TestCoordSystem(tests.IrisTest): + def setUp(self): + tlc.setUpModule() + + def tearDown(self): + tlc.tearDownModule() + def test_load_laea_grid(self): cube = iris.load_cube( tests.get_data_path( @@ -492,6 +500,171 @@ def test_load_laea_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_laea.cml")) + datum_cf_var_cdl = """ + netcdf output { + dimensions: + y = 4 ; + x = 3 ; + variables: + float data(y, x) ; + data :standard_name = "toa_brightness_temperature" ; + data :units = "K" ; + data :grid_mapping = "mercator" ; + int mercator ; + mercator:grid_mapping_name = "mercator" ; + mercator:longitude_of_prime_meridian = 0. ; + mercator:earth_radius = 6378169. ; + mercator:horizontal_datum_name = "OSGB36" ; + float y(y) ; + y:axis = "Y" ; + y:units = "m" ; + y:standard_name = "projection_y_coordinate" ; + float x(x) ; + x:axis = "X" ; + x:units = "m" ; + x:standard_name = "projection_x_coordinate" ; + + // global attributes: + :Conventions = "CF-1.7" ; + :standard_name_vocabulary = "CF Standard Name Table v27" ; + + data: + + data = + 0, 1, 2, + 3, 4, 5, + 6, 7, 8, + 9, 10, 11 ; + + mercator = _ ; + + y = 1, 2, 3, 5 ; + + x = -6, -4, -2 ; + + } + """ + + datum_wkt_cdl = """ +netcdf output5 { +dimensions: + y = 4 ; + x = 3 ; +variables: + float data(y, x) ; + data :standard_name = "toa_brightness_temperature" ; + data :units = "K" ; + data :grid_mapping = "mercator" ; + int mercator ; + mercator:grid_mapping_name = "mercator" ; + mercator:longitude_of_prime_meridian = 0. ; + mercator:earth_radius = 6378169. ; + mercator:longitude_of_projection_origin = 0. ; + mercator:false_easting = 0. ; + mercator:false_northing = 0. ; + mercator:scale_factor_at_projection_origin = 1. 
; + mercator:crs_wkt = "PROJCRS[\\"unknown\\",BASEGEOGCRS[\\"unknown\\",DATUM[\\"OSGB36\\",ELLIPSOID[\\"unknown\\",6378169,0,LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]],PRIMEM[\\"Greenwich\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8901]]],CONVERSION[\\"unknown\\",METHOD[\\"Mercator (variant B)\\",ID[\\"EPSG\\",9805]],PARAMETER[\\"Latitude of 1st standard parallel\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8823]],PARAMETER[\\"Longitude of natural origin\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8802]],PARAMETER[\\"False easting\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8806]],PARAMETER[\\"False northing\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8807]]],CS[Cartesian,2],AXIS[\\"(E)\\",east,ORDER[1],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]],AXIS[\\"(N)\\",north,ORDER[2],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]]" ; + float y(y) ; + y:axis = "Y" ; + y:units = "m" ; + y:standard_name = "projection_y_coordinate" ; + float x(x) ; + x:axis = "X" ; + x:units = "m" ; + x:standard_name = "projection_x_coordinate" ; + +// global attributes: + :standard_name_vocabulary = "CF Standard Name Table v27" ; + :Conventions = "CF-1.7" ; +data: + + data = + 0, 1, 2, + 3, 4, 5, + 6, 7, 8, + 9, 10, 11 ; + + mercator = _ ; + + y = 1, 2, 3, 5 ; + + x = -6, -4, -2 ; +} + """ + + def test_load_datum_wkt(self): + expected = "OSGB 1936" + nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl) + with iris.FUTURE.context(datum_support=True): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(expected, actual) + + def test_no_load_datum_wkt(self): + nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl) + with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(actual, "unknown") + + def test_load_datum_cf_var(self): + expected = "OSGB 1936" + nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl) + with iris.FUTURE.context(datum_support=True): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(expected, actual) + + def test_no_load_datum_cf_var(self): + nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl) + with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(actual, "unknown") + + def test_save_datum(self): + expected = "OSGB 1936" + saved_crs = iris.coord_systems.Mercator( + ellipsoid=iris.coord_systems.GeogCS.from_datum("OSGB36") + ) + + base_cube = stock.realistic_3d() + base_lat_coord = base_cube.coord("grid_latitude") + test_lat_coord = DimCoord( + base_lat_coord.points, + standard_name="projection_y_coordinate", + coord_system=saved_crs, + ) + base_lon_coord = base_cube.coord("grid_longitude") + test_lon_coord = DimCoord( + base_lon_coord.points, + standard_name="projection_x_coordinate", + coord_system=saved_crs, + ) + test_cube = Cube( + base_cube.data, + standard_name=base_cube.standard_name, + units=base_cube.units, + dim_coords_and_dims=( + (base_cube.coord("time"), 0), + (test_lat_coord, 1), + (test_lon_coord, 2), + ), + ) + + with 
self.temp_filename(suffix=".nc") as filename: + iris.save(test_cube, filename) + with iris.FUTURE.context(datum_support=True): + cube = iris.load_cube(filename) + + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(expected, actual) + def _get_scale_factor_add_offset(cube, datatype): """Utility function used by netCDF data packing tests.""" @@ -507,9 +680,9 @@ def _get_scale_factor_add_offset(cube, datatype): else: masked = False if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) + scale_factor = (cmax - cmin) / (2**n - 2) else: - scale_factor = (cmax - cmin) / (2 ** n - 1) + scale_factor = (cmax - cmin) / (2**n - 1) if dt.kind == "u": add_offset = cmin elif dt.kind == "i": @@ -691,12 +864,12 @@ def setUpClass(cls): cls.temp_dir = tempfile.mkdtemp() cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl") cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc") - # Create a reference file from the CDL text. - with open(cls.path_test_cdl, "w") as f_out: - f_out.write(cls._geostationary_problem_cdl) - # Call 'ncgen' to make an actual netCDF file from the CDL. - command = "ncgen -o {} {}".format(cls.path_test_nc, cls.path_test_cdl) - check_call(command, shell=True) + # Create reference CDL and netcdf files from the CDL text. + ncgen_from_cdl( + cdl_str=cls._geostationary_problem_cdl, + cdl_path=cls.path_test_cdl, + nc_path=cls.path_test_nc, + ) @classmethod def tearDownClass(cls): diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index db2113025d..e654694aa7 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -683,7 +683,7 @@ def test_as_pairs(self): class TestSaveLBPROC(tests.IrisTest): def create_cube(self, longitude_coord="longitude"): cube = Cube(np.zeros((2, 3, 4))) - tunit = Unit("days since epoch", calendar="gregorian") + tunit = Unit("days since epoch", calendar="standard") tcoord = DimCoord(np.arange(2), standard_name="time", units=tunit) xcoord = DimCoord( np.arange(3), standard_name=longitude_coord, units="degrees" diff --git a/lib/iris/tests/integration/test_regridding.py b/lib/iris/tests/integration/test_regridding.py index 4ceac6ab1e..3e87a8d0aa 100644 --- a/lib/iris/tests/integration/test_regridding.py +++ b/lib/iris/tests/integration/test_regridding.py @@ -112,7 +112,7 @@ def test_nearest(self): class TestZonalMean_global(tests.IrisTest): def setUp(self): np.random.seed(0) - self.src = iris.cube.Cube(np.random.random_integers(0, 10, (140, 1))) + self.src = iris.cube.Cube(np.random.randint(0, 10, size=(140, 1))) s_crs = iris.coord_systems.GeogCS(6371229.0) sy_coord = iris.coords.DimCoord( np.linspace(-90, 90, 140), diff --git a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml index 5bba278059..da315c36af 100644 --- a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml +++ b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml @@ -400,7 +400,7 @@ 0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/> - + + 347926.666667, 347926.833333, 347927.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -923,7 +923,7 @@ 0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/> - + + 347926.666667, 
347926.833333, 347927.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -1057,7 +1057,7 @@ - + + diff --git a/lib/iris/tests/results/FF/air_temperature_1.cml b/lib/iris/tests/results/FF/air_temperature_1.cml index 267aa88d23..043b9acc16 100644 --- a/lib/iris/tests/results/FF/air_temperature_1.cml +++ b/lib/iris/tests/results/FF/air_temperature_1.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/FF/air_temperature_2.cml b/lib/iris/tests/results/FF/air_temperature_2.cml index 307c58fe72..200a80b54a 100644 --- a/lib/iris/tests/results/FF/air_temperature_2.cml +++ b/lib/iris/tests/results/FF/air_temperature_2.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/FF/soil_temperature_1.cml b/lib/iris/tests/results/FF/soil_temperature_1.cml index e555a3f5b9..57303636c1 100644 --- a/lib/iris/tests/results/FF/soil_temperature_1.cml +++ b/lib/iris/tests/results/FF/soil_temperature_1.cml @@ -11,7 +11,7 @@ - + + diff --git a/lib/iris/tests/results/FF/surface_altitude_1.cml b/lib/iris/tests/results/FF/surface_altitude_1.cml index 27cfad3d09..2669624d37 100644 --- a/lib/iris/tests/results/FF/surface_altitude_1.cml +++ b/lib/iris/tests/results/FF/surface_altitude_1.cml @@ -11,7 +11,7 @@ - + + diff --git a/lib/iris/tests/results/abf/load.cml b/lib/iris/tests/results/abf/load.cml index e470cbebf3..e7954ab229 100644 --- a/lib/iris/tests/results/abf/load.cml +++ b/lib/iris/tests/results/abf/load.cml @@ -30,7 +30,7 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/easy.cml b/lib/iris/tests/results/analysis/aggregated_by/easy.cml index c4edb9484f..d02c3f12d1 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/easy.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/easy.cml @@ -18,6 +18,6 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml index 2f8f1e73d7..dc9bdd0df8 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml @@ -36,6 +36,6 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml index e6b95e3cbc..51e1ae4ff1 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml @@ -31,6 +31,6 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml new file mode 100644 index 0000000000..8c434479c9 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml new file mode 100644 index 0000000000..cca744ff87 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml new file mode 100644 index 0000000000..8c11bdb505 --- /dev/null +++ 
b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml new file mode 100644 index 0000000000..ab7a7195fd --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml new file mode 100644 index 0000000000..d5bb9775fe --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml new file mode 100644 index 0000000000..f7d57a9828 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml new file mode 100644 index 0000000000..50a2c44a98 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml new file mode 100644 index 0000000000..657fb43414 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/areaweights_original.cml b/lib/iris/tests/results/analysis/areaweights_original.cml index 3c33ef500a..651bb648dd 100644 --- a/lib/iris/tests/results/analysis/areaweights_original.cml +++ b/lib/iris/tests/results/analysis/areaweights_original.cml @@ -10,7 +10,7 @@ - + @@ -26,7 +26,7 @@ - + diff --git a/lib/iris/tests/results/analysis/gmean_latitude.cml b/lib/iris/tests/results/analysis/gmean_latitude.cml index ebe22c54f3..26b7fdc8af 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml b/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml index 3cd6a93948..94ed36ac88 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml index 
cc7b3133e0..1db977312b 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/hmean_latitude.cml b/lib/iris/tests/results/analysis/hmean_latitude.cml index d953f0e4d9..70e3fcb540 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml b/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml index 43700b083c..f762fd643b 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml index e17383ff64..369dca3203 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_latitude.cml b/lib/iris/tests/results/analysis/max_latitude.cml index faa54fff8a..89542d27d3 100644 --- a/lib/iris/tests/results/analysis/max_latitude.cml +++ b/lib/iris/tests/results/analysis/max_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_latitude_longitude.cml b/lib/iris/tests/results/analysis/max_latitude_longitude.cml index 8437e8f4a1..7d24ca7f14 100644 --- a/lib/iris/tests/results/analysis/max_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/max_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml index 5b6504dfb1..b4d1e0349c 100644 --- a/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_run_bar_2d.cml b/lib/iris/tests/results/analysis/max_run_bar_2d.cml new file mode 100644 index 0000000000..32a8a377be --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_bar_2d.cml @@ -0,0 +1,18 @@ + + + + 
+ + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml b/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml new file mode 100644 index 0000000000..32a8a377be --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/max_run_foo_1d.cml b/lib/iris/tests/results/analysis/max_run_foo_1d.cml new file mode 100644 index 0000000000..b2a3bcef56 --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_foo_1d.cml @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/max_run_foo_2d.cml b/lib/iris/tests/results/analysis/max_run_foo_2d.cml new file mode 100644 index 0000000000..fb8448136f --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_foo_2d.cml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/mean_latitude.cml b/lib/iris/tests/results/analysis/mean_latitude.cml index fcf2ef55be..80921e762d 100644 --- a/lib/iris/tests/results/analysis/mean_latitude.cml +++ b/lib/iris/tests/results/analysis/mean_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/mean_latitude_longitude.cml b/lib/iris/tests/results/analysis/mean_latitude_longitude.cml index 5cb139be1a..6ac9400a3a 100644 --- a/lib/iris/tests/results/analysis/mean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/mean_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml index 573fa1c694..affcf07c07 100644 --- a/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/median_latitude.cml b/lib/iris/tests/results/analysis/median_latitude.cml index 49006c9592..bbf3875688 100644 --- a/lib/iris/tests/results/analysis/median_latitude.cml +++ b/lib/iris/tests/results/analysis/median_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/median_latitude_longitude.cml b/lib/iris/tests/results/analysis/median_latitude_longitude.cml index 49ec42b391..5663f6d65f 100644 --- a/lib/iris/tests/results/analysis/median_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/median_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml index 036c6bb2f9..c0c0d7c46b 100644 --- 
a/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/min_latitude.cml b/lib/iris/tests/results/analysis/min_latitude.cml index 34a2dc5548..bf20be30a9 100644 --- a/lib/iris/tests/results/analysis/min_latitude.cml +++ b/lib/iris/tests/results/analysis/min_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/min_latitude_longitude.cml b/lib/iris/tests/results/analysis/min_latitude_longitude.cml index 76c7e96bce..3792645582 100644 --- a/lib/iris/tests/results/analysis/min_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/min_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml index 6b484eb591..b43231b7e6 100644 --- a/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/original.cml b/lib/iris/tests/results/analysis/original.cml index 23129095b6..414de1b6b5 100644 --- a/lib/iris/tests/results/analysis/original.cml +++ b/lib/iris/tests/results/analysis/original.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/original_common.cml b/lib/iris/tests/results/analysis/original_common.cml index c1759c12bd..bbfa48d7d8 100644 --- a/lib/iris/tests/results/analysis/original_common.cml +++ b/lib/iris/tests/results/analysis/original_common.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/original_hmean.cml b/lib/iris/tests/results/analysis/original_hmean.cml index 952cede1c2..bdc145022c 100644 --- a/lib/iris/tests/results/analysis/original_hmean.cml +++ b/lib/iris/tests/results/analysis/original_hmean.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml b/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml index dc1fee2f2b..1ac69490b4 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" 
units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml b/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml index 6fdbe7df00..eb9adb4aef 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml @@ -99,7 +99,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset.cml b/lib/iris/tests/results/analysis/regrid/linear_subset.cml index d9b80dd86b..9bd62287fe 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml index d9b80dd86b..9bd62287fe 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml index d9b80dd86b..9bd62287fe 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml b/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml index b2aec5e891..a1cff2363e 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml b/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml index f6647aa426..98a0b6b805 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml @@ -99,7 +99,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset.cml index 7e12c9be60..a704cbecbb 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" 
value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml index 7e12c9be60..a704cbecbb 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml index 7e12c9be60..a704cbecbb 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/no_overlap.cml b/lib/iris/tests/results/analysis/regrid/no_overlap.cml index 6aa4d218f8..da2f03f1ee 100644 --- a/lib/iris/tests/results/analysis/regrid/no_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/no_overlap.cml @@ -99,7 +99,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/rms_latitude.cml b/lib/iris/tests/results/analysis/rms_latitude.cml index e409daed2d..d4b1428fb2 100644 --- a/lib/iris/tests/results/analysis/rms_latitude.cml +++ b/lib/iris/tests/results/analysis/rms_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/rms_latitude_longitude.cml b/lib/iris/tests/results/analysis/rms_latitude_longitude.cml index 9bdc53fbad..4293087847 100644 --- a/lib/iris/tests/results/analysis/rms_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/rms_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml index 89a593d122..9ca1d23b42 100644 --- a/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/std_dev_latitude.cml b/lib/iris/tests/results/analysis/std_dev_latitude.cml index 154d5ef587..a45aefeff4 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml 
b/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml index 770ef9a35a..95e8e3694d 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml index a5ce049ca5..f91f6005b7 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/sum_latitude.cml b/lib/iris/tests/results/analysis/sum_latitude.cml index 943aa9312f..fbb8460fd8 100644 --- a/lib/iris/tests/results/analysis/sum_latitude.cml +++ b/lib/iris/tests/results/analysis/sum_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/sum_latitude_longitude.cml b/lib/iris/tests/results/analysis/sum_latitude_longitude.cml index 2eff41339b..cb992f3b9d 100644 --- a/lib/iris/tests/results/analysis/sum_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/sum_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml index a2a46d2ba8..6171dc516b 100644 --- a/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/variance_latitude.cml b/lib/iris/tests/results/analysis/variance_latitude.cml index 437587b00d..5b55731396 100644 --- a/lib/iris/tests/results/analysis/variance_latitude.cml +++ b/lib/iris/tests/results/analysis/variance_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/variance_latitude_longitude.cml b/lib/iris/tests/results/analysis/variance_latitude_longitude.cml index 391ab8834e..359e40ef8a 100644 --- a/lib/iris/tests/results/analysis/variance_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/variance_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml index 
535468acfc..0345eac77b 100644 --- a/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/weighted_mean_original.cml b/lib/iris/tests/results/analysis/weighted_mean_original.cml index 2df84a8606..a69e633e26 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_original.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_original.cml @@ -10,7 +10,7 @@ - + - + diff --git a/lib/iris/tests/results/categorisation/customcheck.cml b/lib/iris/tests/results/categorisation/customcheck.cml index d6dcc7179d..476a1c56ef 100644 --- a/lib/iris/tests/results/categorisation/customcheck.cml +++ b/lib/iris/tests/results/categorisation/customcheck.cml @@ -19,7 +19,7 @@ + 513, 540, 567, 594]" shape="(23,)" standard_name="time" units="Unit('days since 1970-01-01 00:00:00', calendar='standard')" value_type="int32"/> diff --git a/lib/iris/tests/results/categorisation/quickcheck.cml b/lib/iris/tests/results/categorisation/quickcheck.cml index f64c70350f..b8f3904ad1 100644 --- a/lib/iris/tests/results/categorisation/quickcheck.cml +++ b/lib/iris/tests/results/categorisation/quickcheck.cml @@ -68,7 +68,7 @@ + 513, 540, 567, 594]" shape="(23,)" standard_name="time" units="Unit('days since 1970-01-01 00:00:00', calendar='standard')" value_type="int32"/> diff --git a/lib/iris/tests/results/cdm/extract/lat_eq_10.cml b/lib/iris/tests/results/cdm/extract/lat_eq_10.cml index f10c0be37c..e7213fc7bd 100644 --- a/lib/iris/tests/results/cdm/extract/lat_eq_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_eq_10.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cdm/extract/lat_gt_10.cml b/lib/iris/tests/results/cdm/extract/lat_gt_10.cml index e0d138f327..3ffbbf89e5 100644 --- a/lib/iris/tests/results/cdm/extract/lat_gt_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_gt_10.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml b/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml index 3b435e9ceb..7091aee748 100644 --- a/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/cdm/masked_cube.cml b/lib/iris/tests/results/cdm/masked_cube.cml index a38340913e..dcfa8c062f 100644 --- a/lib/iris/tests/results/cdm/masked_cube.cml +++ b/lib/iris/tests/results/cdm/masked_cube.cml @@ -10,7 +10,7 @@ - + + 1000.0, 1006.0]" shape="(8,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/constrained_load/all_10_load_match.cml b/lib/iris/tests/results/constrained_load/all_10_load_match.cml index 6a582f9d67..0712af20fa 100644 --- a/lib/iris/tests/results/constrained_load/all_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/all_10_load_match.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -92,7 +92,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -144,7 +144,7 @@ - + @@ -165,7 +165,7 @@ - + - + diff --git 
a/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml b/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml index 458474f98a..20971021ac 100644 --- a/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml +++ b/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml @@ -11,7 +11,7 @@ - + @@ -43,7 +43,7 @@ [0.222443, 0.177555]]" id="a5c170db" long_name="sigma" points="[0.784571, 0.199878]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + @@ -64,7 +64,7 @@ - + @@ -96,7 +96,7 @@ [0.222443, 0.177555]]" id="a5c170db" long_name="sigma" points="[0.784571, 0.199878]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + @@ -117,7 +117,7 @@ - + @@ -150,7 +150,7 @@ [0.246215, 0.199878]]" id="a5c170db" long_name="sigma" points="[0.803914, 0.222443]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + @@ -171,7 +171,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/attribute_constraint.cml b/lib/iris/tests/results/constrained_load/attribute_constraint.cml index 31714035fa..664dc943bc 100644 --- a/lib/iris/tests/results/constrained_load/attribute_constraint.cml +++ b/lib/iris/tests/results/constrained_load/attribute_constraint.cml @@ -12,7 +12,7 @@ - + @@ -131,7 +131,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml index bbafc31987..44e7d077df 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -115,7 +115,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml index bbafc31987..44e7d077df 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -115,7 +115,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_load_match.cml b/lib/iris/tests/results/constrained_load/theta_10_load_match.cml index 2e5005d042..e2852d0151 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_load_match.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml index 2e5005d042..e2852d0151 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml b/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml index 40bb37f3ab..772929b0da 100644 --- 
a/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -181,7 +181,7 @@ - + @@ -202,7 +202,7 @@ - + @@ -232,7 +232,7 @@ - + @@ -253,7 +253,7 @@ - + @@ -284,7 +284,7 @@ - + @@ -305,7 +305,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml index 03fed4e61b..0e23de090c 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -181,7 +181,7 @@ - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml index eadbe8f365..a175652c30 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -270,7 +270,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml index eadbe8f365..a175652c30 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -270,7 +270,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml index 77534b9b55..0048a742a6 100644 --- a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml @@ -11,7 +11,7 @@ - + @@ -64,7 +64,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml index 77534b9b55..0048a742a6 100644 --- a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -64,7 +64,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml b/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml index f6727427a1..e24937854d 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 
+130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml index f6727427a1..e24937854d 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml index daef7ba9dc..218bdd6b1c 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml index daef7ba9dc..218bdd6b1c 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_load_match.cml b/lib/iris/tests/results/constrained_load/theta_load_match.cml index 293e40cc3a..0e5b02be51 100644 --- a/lib/iris/tests/results/constrained_load/theta_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_load_strict.cml index 293e40cc3a..0e5b02be51 100644 --- a/lib/iris/tests/results/constrained_load/theta_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/coord_api/intersection_reversed.xml b/lib/iris/tests/results/coord_api/intersection_reversed.xml index b966a09b54..b489f95451 100644 --- a/lib/iris/tests/results/coord_api/intersection_reversed.xml +++ b/lib/iris/tests/results/coord_api/intersection_reversed.xml @@ -1,9 +1,9 @@ - + diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt index 6b95b57215..410da3613a 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt @@ -1,4 +1,4 @@ -DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) +DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [ 2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00] diff --git a/lib/iris/tests/results/coord_systems/Mercator.xml b/lib/iris/tests/results/coord_systems/Mercator.xml index e8036ef824..4ea768b41e 100644 --- a/lib/iris/tests/results/coord_systems/Mercator.xml +++ b/lib/iris/tests/results/coord_systems/Mercator.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/coord_systems/PolarStereographic.xml b/lib/iris/tests/results/coord_systems/PolarStereographic.xml new file mode 
100644 index 0000000000..85abfc892f --- /dev/null +++ b/lib/iris/tests/results/coord_systems/PolarStereographic.xml @@ -0,0 +1,2 @@ + + diff --git a/lib/iris/tests/results/coord_systems/PolarStereographicScaleFactor.xml b/lib/iris/tests/results/coord_systems/PolarStereographicScaleFactor.xml new file mode 100644 index 0000000000..2fc1554cd7 --- /dev/null +++ b/lib/iris/tests/results/coord_systems/PolarStereographicScaleFactor.xml @@ -0,0 +1,2 @@ + + diff --git a/lib/iris/tests/results/coord_systems/PolarStereographicStandardParallel.xml b/lib/iris/tests/results/coord_systems/PolarStereographicStandardParallel.xml new file mode 100644 index 0000000000..de7b5f902c --- /dev/null +++ b/lib/iris/tests/results/coord_systems/PolarStereographicStandardParallel.xml @@ -0,0 +1,2 @@ + + diff --git a/lib/iris/tests/results/coord_systems/Stereographic.xml b/lib/iris/tests/results/coord_systems/Stereographic.xml index bb12cd94cc..fb338a8e4d 100644 --- a/lib/iris/tests/results/coord_systems/Stereographic.xml +++ b/lib/iris/tests/results/coord_systems/Stereographic.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml index 458b9bf908..463339e5bc 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml index a2f12b0b27..a91ea4ce5c 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml index 60539d5960..f963658910 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml index 466d0dd8cd..195757a417 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml index 12bf9270d1..c63c260d25 100644 --- 
a/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml index 9d1070140b..d6cc708aa1 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml index 4cd9da34f0..23739a1ac5 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml index dd87dc175b..817b855512 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml index 16ea40c33e..29d59ce111 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml index b01ede7936..e99d57b816 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml index 8d38bb748c..8e57ec7258 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml index f4589831a8..67b706e0ae 100644 --- 
a/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml index 138e0207c7..d9c1b2a35c 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml index 0e2cf8ef23..ceafb3fc67 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml index bbc8272c65..e5090a3572 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml index ba5cd7a171..9e8bdebd4a 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml index b835be4057..a4e0cc1445 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml index 93196268e7..d442637062 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/original.cml b/lib/iris/tests/results/cube_collapsed/original.cml index 10a81f21d2..4bc6553dba 100644 --- 
a/lib/iris/tests/results/cube_collapsed/original.cml +++ b/lib/iris/tests/results/cube_collapsed/original.cml @@ -96,7 +96,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml index a4f2cc6084..788d0d8029 100644 --- a/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml index 885328a856..b9b74c6b6d 100644 --- a/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml index 273ad909d9..84b4fea150 100644 --- a/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml index c2e2993874..128d29a281 100644 --- a/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml index 4d6e85f8a8..8c206fe840 100644 --- a/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml index 8f7ccf9b8a..08dc52fca2 100644 --- a/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml b/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml index 33b35b7eaa..5fae922867 100644 --- a/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml +++ b/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml @@ -43,7 +43,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml b/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml index c3db78bd9e..454bd29a18 100644 --- a/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml +++ b/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml @@ -43,7 +43,7 @@ - + diff --git 
a/lib/iris/tests/results/cube_io/pickling/cubelist.cml b/lib/iris/tests/results/cube_io/pickling/cubelist.cml index 6cebe384aa..eb839e36e4 100644 --- a/lib/iris/tests/results/cube_io/pickling/cubelist.cml +++ b/lib/iris/tests/results/cube_io/pickling/cubelist.cml @@ -400,7 +400,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -528,7 +528,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_io/pickling/single_cube.cml b/lib/iris/tests/results/cube_io/pickling/single_cube.cml index 2cd3dbb3cb..a025713766 100644 --- a/lib/iris/tests/results/cube_io/pickling/single_cube.cml +++ b/lib/iris/tests/results/cube_io/pickling/single_cube.cml @@ -400,7 +400,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_io/pickling/theta.cml b/lib/iris/tests/results/cube_io/pickling/theta.cml index 39ee6aecfd..6c69f6ed54 100644 --- a/lib/iris/tests/results/cube_io/pickling/theta.cml +++ b/lib/iris/tests/results/cube_io/pickling/theta.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_io/pp/load/global.cml b/lib/iris/tests/results/cube_io/pp/load/global.cml index 2df84a8606..a69e633e26 100644 --- a/lib/iris/tests/results/cube_io/pp/load/global.cml +++ b/lib/iris/tests/results/cube_io/pp/load/global.cml @@ -10,7 +10,7 @@ - + - + diff --git a/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml b/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml index 3f9e5fef9e..f272cebeb1 100644 --- a/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml +++ b/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml @@ -9,15 +9,15 @@ - + - + - + @@ -24,7 +24,7 @@ - + diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml index 9e5b5a57db..b1bf424a93 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml +++ b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml @@ -11,7 +11,7 @@ - + @@ -24,7 +24,7 @@ - + diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml index 061255bbe4..50fd683cb3 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml +++ b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml @@ -11,7 +11,7 @@ - + @@ -24,7 +24,7 @@ - + diff --git 
diff --git a/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml b/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml
index 2f899b333e..1563dce74d 100644
--- a/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml
+++ b/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml
[hunks @@ -11,7 and @@ -25,7: one coordinate line each, units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml b/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml
index 1c1e58c02b..5b7d800716 100644
--- a/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml
+++ b/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml
[hunks @@ -3,7 and @@ -16,7: one coordinate line each, units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml b/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml
index 02d380a097..edf4392d30 100644
--- a/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml
+++ b/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml
[hunk @@ -13,7: one coordinate line, units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/derived/column.cml b/lib/iris/tests/results/derived/column.cml
index e4402b4e4d..827214dafa 100644
--- a/lib/iris/tests/results/derived/column.cml
+++ b/lib/iris/tests/results/derived/column.cml
@@ -111,7 +111,7 @@
-               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
+               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/>
diff --git a/lib/iris/tests/results/derived/no_orog.cml b/lib/iris/tests/results/derived/no_orog.cml
index ec0ffdd5ff..844373675e 100644
--- a/lib/iris/tests/results/derived/no_orog.cml
+++ b/lib/iris/tests/results/derived/no_orog.cml
@@ -136,7 +136,7 @@
-               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
+               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/>
diff --git a/lib/iris/tests/results/derived/removed_derived_coord.cml b/lib/iris/tests/results/derived/removed_derived_coord.cml
index 12feb2b643..5175d88875 100644
--- a/lib/iris/tests/results/derived/removed_derived_coord.cml
+++ b/lib/iris/tests/results/derived/removed_derived_coord.cml
@@ -110,7 +110,7 @@
-               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
+               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/>
diff --git a/lib/iris/tests/results/derived/removed_orog.cml b/lib/iris/tests/results/derived/removed_orog.cml
index 4c30ec69bc..982e38fd1e 100644
--- a/lib/iris/tests/results/derived/removed_orog.cml
+++ b/lib/iris/tests/results/derived/removed_orog.cml
@@ -122,7 +122,7 @@
-               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
+               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/>
diff --git a/lib/iris/tests/results/derived/removed_sigma.cml b/lib/iris/tests/results/derived/removed_sigma.cml
index ea34680b7d..3908c22188 100644
--- a/lib/iris/tests/results/derived/removed_sigma.cml
+++ b/lib/iris/tests/results/derived/removed_sigma.cml
@@ -462,7 +462,7 @@
-               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
+               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/>
diff --git a/lib/iris/tests/results/derived/transposed.cml b/lib/iris/tests/results/derived/transposed.cml
index eef077d774..c44857bd61 100644
--- a/lib/iris/tests/results/derived/transposed.cml
+++ b/lib/iris/tests/results/derived/transposed.cml
@@ -498,7 +498,7 @@
-               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
+               347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/>
diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml
index 1bb899c558..c7200d6106 100644
--- a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml
+++ b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml
[hunk @@ -31,7: one coordinate line, units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml
index 585657b642..b41c0e48c7 100644
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml
+++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml
@@ -66,7 +66,7 @@
                [0.989272, 0.984692]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482, 0.991375, 0.987171]" shape="(5,)" units="Unit('1')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml
index 4e928851fe..8617be9372 100644
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml
+++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml
@@ -60,7 +60,7 @@
                [0.989272, 0.984692]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482, 0.991375, 0.987171]" shape="(5,)" units="Unit('1')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml
index 31a753c059..70df0e198d 100644
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml
+++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml
@@ -429,7 +429,7 @@
                218.732, 216.367]]" shape="(16, 21)" standard_name="surface_altitude" units="Unit('m')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml
index be79f3ff57..b863adcf55 100644
--- a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml
@@ -34,7 +34,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml
index 568c835e97..b46908a648 100644
--- a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml
@@ -53,7 +53,7 @@
                16800.0, 17100.0, 17400.0, 17700.0, 18000.0,
                18300.0, 18600.0, 18900.0, 19200.0, 19500.0, 19800.0, 20100.0, 20400.0, 20700.0, 21000.0,
-               21300.0, 21600.0]" shape="(72,)" standard_name="time" units="Unit('seconds since 2016-01-01 15:00:00', calendar='gregorian')" value_type="float64" var_name="time_instant">
+               21300.0, 21600.0]" shape="(72,)" standard_name="time" units="Unit('seconds since 2016-01-01 15:00:00', calendar='standard')" value_type="float64" var_name="time_instant">
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml
index 6d7873daaa..57209e4ba7 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml
@@ -45,7 +45,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml
index b664e3cf6f..c260587921 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml
@@ -45,7 +45,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml
index b30d443495..e545e05fdc 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml
@@ -34,7 +34,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml
index 157755298d..4eedfc21b3 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml
@@ -34,7 +34,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml
index a9eba1a80d..55155047bb 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml
@@ -34,7 +34,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml
index e90c048803..fc52fce0b3 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml
@@ -34,7 +34,7 @@
                -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"/>
[one coordinate line changed: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/file_load/theta_levels.cml b/lib/iris/tests/results/file_load/theta_levels.cml
index b4ae2a4b35..fc708b7949 100644
--- a/lib/iris/tests/results/file_load/theta_levels.cml
+++ b/lib/iris/tests/results/file_load/theta_levels.cml
[repeated hunks from @@ -11 to @@ -1928, each changing one coordinate line: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/file_load/u_wind_levels.cml b/lib/iris/tests/results/file_load/u_wind_levels.cml
index 68a3b45f07..5d1af58f6c 100644
--- a/lib/iris/tests/results/file_load/u_wind_levels.cml
+++ b/lib/iris/tests/results/file_load/u_wind_levels.cml
[repeated hunks from @@ -11 to @@ -1966, each changing one coordinate line: units calendar='gregorian' -> calendar='standard']
diff --git a/lib/iris/tests/results/file_load/v_wind_levels.cml b/lib/iris/tests/results/file_load/v_wind_levels.cml
index 9ccdade1bd..c7145a7e9e 100644
--- a/lib/iris/tests/results/file_load/v_wind_levels.cml
+++ b/lib/iris/tests/results/file_load/v_wind_levels.cml
[repeated hunks from @@ -11 to @@ -1935: coordinate units calendar='gregorian' -> calendar='standard' throughout]
diff --git a/lib/iris/tests/results/file_load/wind_levels.cml b/lib/iris/tests/results/file_load/wind_levels.cml
index 96d821fc1c..33584deec6 100644
--- a/lib/iris/tests/results/file_load/wind_levels.cml
+++ b/lib/iris/tests/results/file_load/wind_levels.cml
[repeated hunks from @@ -11 to @@ -3911: coordinate units calendar='gregorian' -> calendar='standard' throughout]
diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json
index 79560a5365..e5c2ad863a 100644
--- a/lib/iris/tests/results/imagerepo.json
+++ b/lib/iris/tests/results/imagerepo.json
@@ -1,1080 +1,242 @@
 {
-    "gallery_tests.test_plot_COP_1d.TestCOP1DPlot.test_plot_COP_1d.0": [
-        "https://scitools.github.io/test-iris-imagehash/images/v4/baff589936602d8ec977334ae4dac9b61a6dc4d99532c86cc2913e36c4cc0f61.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33.png"
-    ],
-    "gallery_tests.test_plot_COP_maps.TestCOPMaps.test_plot_cop_maps.0": [
-        "https://scitools.github.io/test-iris-imagehash/images/v4/ea9138db95668524913e6ac168997e85957e917e876396b96a81b5ce3c496935.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913c6ac178995b0d956e917ec76396b96a853dcf94696935.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94796931.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913d6ac168991f0d956e917ec76396b96a853dcf94696935.png"
-    ],
-    "gallery_tests.test_plot_SOI_filtering.TestSOIFiltering.test_plot_soi_filtering.0": [
-        "https://scitools.github.io/test-iris-imagehash/images/v4/fac460b9c17b78723e05a5a9954edaf062332799954e9ca5c63b9a52d24e5a95.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/fa8460b9c17b78723e05a5a9954edaf062333799954e9ca5c63b9a52d24e4a9d.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/fa167295c5e0696a3c17a58c9568da536233da19994cdab487739b4b9b444eb5.png",
-        "https://scitools.github.io/test-iris-imagehash/images/v4/fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4.png"
-    ],
-    "gallery_tests.test_plot_TEC.TestTEC.test_plot_TEC.0": [
-        "https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9a42846e9a49c7596e3cce6c907b3a83c17e1b8239b3e4f33bc4.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9e43846e9a49c7596e2cce6c907b3a83c16e1b9231b3e4f33b8c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c.png" - ], - "gallery_tests.test_plot_anomaly_log_colouring.TestAnomalyLogColouring.test_plot_anomaly_log_colouring.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ec4464e185a39f93931e9b1e91696d2949dde6e63e26a47a5ad391938d9a5a0c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ecc164e78e979b19b3789b0885a564a56cc2c65e3ec69469db1bdb9a853c1e24.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ece164e68e979b19b3781b0885a564a56ccac65e3ec69469db1bdb9a853c1e24.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ec4464e384a39b13931a9b1c85696da968d5e6e63e26847bdbd399938d3c5a4c.png" - ], - "gallery_tests.test_plot_atlantic_profiles.TestAtlanticProfiles.test_plot_atlantic_profiles.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc5d08fcd00fdb1c93fcb21c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc7c09f4d00fdb1c93fcb21c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/9f8a60536bd28e1320739437b5f437b0a53d66f4cc5c08f4d00fdb1c93fcb21c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/9fc060f462a08f07203ebc77a1f36707e61f4e38d8f7d08a910197fc877cec58.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/97c160f462a88f07203ebc77a1e36707e61f4e38d8f3d08a910597fc877cec58.png" - ], - "gallery_tests.test_plot_atlantic_profiles.TestAtlanticProfiles.test_plot_atlantic_profiles.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a6eaa57e6e81ddf999311ba3b3775e20845d5889c199673b4e22a4675e8ca11c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991f1322b3761e06845718d89995b3131f32a4765ec2a1cd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991d1322b3741e2684571cd89995b3131f32a4765ee2a1cc.png" - ], - "gallery_tests.test_plot_coriolis.TestCoriolisPlot.test_plot_coriolis.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e78665de9a699659e55e9965886979966986c5e63e98c19e3a256679e1981a24.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e68665de9a699659c1fe99a5896965966996c46e3e19c1da3a652669c51e1a26.png" - ], - "gallery_tests.test_plot_cross_section.TestCrossSection.test_plot_cross_section.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95317b9562e4d1649f5a05856e4ca4da52947e4ea5f13f1b499d42f13b1b41.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea91b17b9562e4d1609f5a05856e4ca45a52957e5ea5f13b1bca9dc0b17b1ac1.png" - ], - "gallery_tests.test_plot_cross_section.TestCrossSection.test_plot_cross_section.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9521fb956a394069921e93f07f4aad856cc47e4e95857a1ea5da3591ba1b81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9521fb956a394068931e9be07e4aa5856cc47e4a91957a1ba55bb5b17a3b81.png" - ], - "gallery_tests.test_plot_custom_aggregation.TestCustomAggregation.test_plot_custom_aggregation.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee816f81917e907eb03ec73f856f7ac198d070186e90811f1be33ee1a57a6e18.png" - ], - "gallery_tests.test_plot_custom_file_loading.TestCustomFileLoading.test_plot_custom_file_loading.0": [ 
- "https://scitools.github.io/test-iris-imagehash/images/v4/faa0cbf1845e34be913787416edcc8bc3bc81f9b63332662a4ed30cdc1b2cd21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fba0cbf1845e34be912787416edcc8bc3b881f9b62332762a5ad32cdc1b2cd21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa1cb47845e34bc912797436cccc8343f11359b73523746c48c72d9d9b34da5.png" - ], - "gallery_tests.test_plot_deriving_phenomena.TestDerivingPhenomena.test_plot_deriving_phenomena.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/b9993986866952e6c9464639c4766bd9c669916e7b99c1663f99768990763e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b99139de866952e6c946c639c47e6bd18769d16e7a9981662e813699d0763e89.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ec97681793689768943c97e8926669d186e8c33f6c99c32e6b936c83d33e2c98.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ec97681793689768943c96e890666bc586e1c33f2c99c33e6f956c93d23e2c98.png" - ], - "gallery_tests.test_plot_global_map.TestGlobalMap.test_plot_global_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa9979468566857ef07e3e8978566b91cb0179883c89946686a96b9d83766f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa997b958466846ed13e87467a997a898d66d17e2cc9906684696f99d3162f81.png" - ], - "gallery_tests.test_plot_hovmoller.TestGlobalMap.test_plot_hovmoller.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bab430b4ce4bce43c5becf89c54b1a63c543c56e1e64907e3bb469b490de1ac1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeb46cb4934b934bc07e974bc14b38949943c0fe3e94c17f6ea46cb4c07b3f00.png" - ], - "gallery_tests.test_plot_inset.TestInsetPlot.test_plot_inset.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ebff6992f50096a5b245dac4f6559496b49248dbc95dcb699529912dcf244a54.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9ff6992b50096a5b245dac4f64594b6b49248dbc95dcb699529952dcf244a56.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebff6992b50096ad9267dac4d64094b294924cdbc95d4b699d29952dcda46e94.png" - ], - "gallery_tests.test_plot_lagged_ensemble.TestLaggedEnsemble.test_plot_lagged_ensemble.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bbbb31e1c44e64e4b0459b5bb1716ecac464f496ce34618eb1079b39b193ce25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbbb31b1c44e64e4b1579b5b917133cecc61f146c414668eb1119b1bb197ce34.png" - ], - "gallery_tests.test_plot_lagged_ensemble.TestLaggedEnsemble.test_plot_lagged_ensemble.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abfef958fd462c993a07d87960464b81d1009687c139d3b594e9cf87c6b89687.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aafec5e9e5e03e099a07e0f86542db879438261ec3b13ce78d8dc65a92d83d89.png" - ], - "gallery_tests.test_plot_lineplot_with_legend.TestLineplotWithLegend.test_plot_lineplot_with_legend.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eae942526540b869961f8da694589da69543cc9af1014afbc3fd596b84fe19a7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eae942146540b869961f8de694589da69543cc9af1014afbc3fd596b84fe19a7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafd9e12a5a061e9925ec716de489e9685078ec981b229e70ddb79219cc3768d.png" - ], - "gallery_tests.test_plot_load_nemo.TestLoadNemo.test_plot_load_nemo.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/a3ff34e87f0049496d17c4d9c04fc225d256971392d39f1696df0f16cec00f36.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fb11731a94cea4ee64b35e91d1d2304e9e5ac7397b20e1fe12852487e666ce46.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bb11721a87cce5e4cce79e81d19b3b5e1e1cd3783168e07835853485e65e2e1e.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a665a69a599659e5db1865c2653b869996cce63e99e19a1a912639e7181e65.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e58661969e799659c1f719a6c867359a1996c0773649c09c3e612679c07b3f66.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f2c464ce9e399332e1b74ce1cc79338c6586e5b33b31b37a66c9664cc06e1a64.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a58660ce9e739b31c93d1cc9c8df33863383e33b3f11c03f2664366cc8ee3cc1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a58660ce9e739b31c93d1c89c8df33863783e23b3f11c07f2664366cc8ee3cc1.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a83846ea46ce539c93391de32cc86cf87a33fa168721cdb3e896e374b04.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be817a87845ea56cec79817a919e338436a5c1e73fa16c736c4a3e816a1e6b1c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be817a8784dea56cec79817a919e338437a5c1e73fa16c726c4a3e816a1c6b1c.png" - ], - "gallery_tests.test_plot_polar_stereo.TestPolarStereo.test_plot_polar_stereo.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e615ec7e097ad961f9cb190f038e091c2c1e73f07c11f6f386b3cc1793e01.png" - ], - "gallery_tests.test_plot_polynomial_fit.TestPolynomialFit.test_plot_polynomial_fit.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abff4a9df26435886520c97f12414695c4b69d23934bc86adc969237d68ccc6f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aaff4a9df26435886520c97f12414695c4b69d23934bc86adc969a17d69ccc6f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aeffcb34d244348be5a2c96c3a4fc6d0c4b69f2d87294ccb9f1a125684cd7c11.png" - ], - "gallery_tests.test_plot_projections_and_annotations.TestProjectionsAndAnnotations.test_plot_projections_and_annotations.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854f19851a30e4cc76cd0bb179325ca7c665b0c938cb4b4e719e9cb727b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fac54f19851a30e4cc76cd0bb179325cb78665b0c938cb4b4e719e9c9727b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854e19851a30e4cc76cd0bb179325cb7c664b0c938cb4bce739e9c37a3b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854e19851a30e4cc76cd0bb179325cb78665b1c938c94bce739e9c3727b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854f19851a30e4cc76cd0bb0f932dca7c665b1c92ccb4b4ed19e9c3721b5c8.png" - ], - "gallery_tests.test_plot_projections_and_annotations.TestProjectionsAndAnnotations.test_plot_projections_and_annotations.1": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e3856d999c389662734331afcd2d5a7184dba492b9b69b64d26dc29974b185b2.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa15615e97a193adc15e1e81c4fa3eb49d30817e3e05c17e7ba59927817e1e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee46607e97a19781c0df1f81d0bb3e241f20c16f3fc0c1fe39263d33d06f3e80.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba056717c3e099e9b90f8e81c4da589499b696763e45e56b3b893929c17b7e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57685f95a886a1c0de9da090be3e2697e1c0ff3f00c17e6b266c17c07f3f00.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e605ec7a191a1b85e9e81c4da58909996b37e3a65e16f7b817939e57a1e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e605ec7a193a1b85e9e81c4da58909996b3763a65e16f7b816939ed7a1e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a697e97a18681c6da9f8190bf3e263624c1ef3b48c17a2b223c47c0ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57685f95a886a1c0de9da090be3e2497e1c0ef3f01c17e6b366c17c07b3f01.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8172d0847ecd2bc913939c36846c714933799cc3cc8727e67639f939996a58.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8172c6857ecd38cb3392ce36c564311931d85ec64e9787719a39993c316e66.png" - ], - "gallery_tests.test_plot_wind_barbs.TestWindBarbs.test_wind_barbs.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169316c1fe9e96c29e36739e13c07c3d61c07f39a13921c07f3e21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e161e996169316c1fe9e96c29e36739e13c07c3d61c07f39813929c07f3f01.png" - ], - "gallery_tests.test_plot_wind_speed.TestWindSpeed.test_plot_wind_speed.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bcf924fb9306930ce12ccf97c73236b28ecec4cd3e29847b18e639e6c14f1a09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169306c1fe9e96c29e36739e13c06c3d61c07f39a139e1c07f3f01.png" - ], - "gallery_tests.test_plot_wind_speed.TestWindSpeed.test_plot_wind_speed.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bcf924fb9306930ce12ccf97c73236b28ecec4cc3e29847b38e639e6c14f1a09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169306c1ee9e96c29e36739653c06c3d61c07f3da139e1c07f3f01.png" - ], - "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe81957ac17e6a85817e6a85857e942a3e81957a7e81917a7a81d95ec17e2ca1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe81c17e817e3e81817e7e81857e7e817e81c07e7e81c17e7a81817e817e8c2a.png" - ], - "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be81c17ec17e7e81c17e3e81c57ea55a3e80c17e3e81c1fe7a81c285c95f2c03.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fe81857e817e6a85817e7a81857e7e817e81957a7e81817e7a81817e817e843e.png" - ], - "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea857a81957a857e957ec17e817e6a816a853e817a853e816e818d3a862ad3fe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be81857ec17e7a81c17e7e81857e3e803e81817a3e81c17e7a81c17ec97e2c2b.png" - ], - "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_northpolarstereo.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e59661969e699659c0f719a6c967339a1992c07f3649c09c3f612669c07b3f66.png" - ], - "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_platecarree.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee816299954a1da699b6915ec25b6e419729c42c3f84bd9fe6d262d1d1dac076.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee856299954a1da699b6915ec25b6e419729c42c3f84bd8fa7d262d1d1dac076.png" - ], - "iris.tests.integration.plot.test_plot_2d_coords.Test2dContour.test_2d_coords_contour.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/b4b2643ecb05cb43b0f23d80c53c4e1d3e5990eb1f81c19f2f983cb1c4ff3e42.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eb036726c47c9273918e6e2c6f216336787590eb969a165890ee6c676925b3b3.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon_on_polar_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e66d673c999031cd6667663398dc332c676364e798959336636660d933998666.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_rotated_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eba037a4c479c273b2963f2c6f6126966865d86f969e33c9b1706c26692793b0.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_1d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ac334934d2e65c72596325b343338cb41c92d9c5b36f65330d379692ca6d6c.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_2d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7acb36134d2e676627963259343330cb43e92d9c5336e67330d379292ca6d6c.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fb8d4f21c472b27e919d2e216f216b3178e69c7e961ab39a84696c616d245b94.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon_on_polar_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e66c6619999933666666c6d99999336663646d9999c1332667b60cf964d8672c.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_rotated_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eba925a5c476d25a95a56b876f3826246a449c6b96a3731ab13f6c656a5cb48a.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_1d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ac24947259f3493697632df45926b6e126c4f392593b4937266f26ccf032d8.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_2d_coords.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/afac26367251d3493617632df45c26a6e126c6f392593b4937266f26ccf232d0.png" - ], - "iris.tests.test_analysis.TestProject.test_cartopy_projection.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/9e1952c9c165b4fc668a9d47c1461d7a60fb2e853eb426bd62fd229c9f04c16d.png" - ], - "iris.tests.test_mapping.TestBasic.test_contourf.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a69cc96ad92e193c9963385929e1cc3819acde6d965ce6e666b30386e65b1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c.png" - ], - "iris.tests.test_mapping.TestBasic.test_pcolor.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95a69c896a592e59bc99e3384929636c32d98cde6d964ce7e666332386465b1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a347c96858b8d9685c9c39696c393966c634969ce3c64697a3864697b3c9c.png" - ], - "iris.tests.test_mapping.TestBasic.test_unmappable.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa5684eb54a947ad09eb731c521978dc2fb1cc0e4966ce26e2c6b2d3a6e691a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853e48957ac1df957ac8be852bc1b1944e7a9878e03f4c6a253e6c7a912dc2.png" - ], - "iris.tests.test_mapping.TestBoundedCube.test_grid.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81917e857e6e81857e7a857a81917a7a81857e857e7e81857e7a817a81852e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a817a81817e7a81857e857e7a81857e7a817a81857e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a817a81857a7a81857e857e7a85857e7a817a81857a.png" - ], - "iris.tests.test_mapping.TestBoundedCube.test_pcolormesh.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81e535857e92ca8ec23d21b13ce15e7a811ea5c47e1a5ac17b652d3b05e4f2.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81c17a857e1ea5857e634a7a81cd257e8584da857e3b29817e68f47a81c791.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81c17a857e1ea5857e734a7a81cd257e8484da857e3b29817a68f47a81c799.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_grid.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf80e2b1c17f1d0ac4f7c8d739a637202749699b6bb3ce3666e4b048944d9d89.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf80e2f1c17f1d0ac457c8d619a637213749699b6bb34e3666e4b04e944d9d89.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05392995bac6d691ce3f21666569d86a96c6360ee195cb91e8ce54953b313b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05392995bac6d691ea3f21666569d86a97c6320ee195cb91e8ce559539391b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1585e885ea7a1785fa7a157a177a017a1585e817a885ea85e86a1785fa7a17.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a817a817a817a81817e7a81857e857e857e857e7a81.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_outline.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e3e80857e7a817a817a817a81817f7a81857e857e857e857e7a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e21857e7a817a817a857a81857a7a81857a857e857a857e7a84.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1585e885e87a1785fa7a177a177e807a1585e85fa0857a85e86817857f6a16.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fa1585e885e86a1785fa7a177a177e807a1585e817a885ea85e86817857f7a17.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e3e81857e7a857a817e817a81857a7a81817e857e857a857e7a81.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_pcolormesh.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf81e6b1c17e1d4884bfc8df39a43720374969db69b34e26c4e4b0ca904f9d89.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57396995a8c6d691ea3f25664569d86b16c63686ed958991ea4a549531393b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57396995a8c6d691ea3e25664569d96b16c63684e9958b91ea4a559431793b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea813b49957ec4b7917e3f60266978d97a9562366e81954a914ec6cc957a0f98.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_scatter.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea053d2e916ac2d9c4d894346b24f3477acf68ad39329ed8c696e136c1ab9a71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea053d2e916ac2d9c4d895346b2473477acf68ad39329ed8c69ee126c1ab9a71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05bd2e916ac2d984983d346b2473477acf69ad3d3296d8c696e126c1ab1e71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05bd3a91eac2d984983d346b2473477acf69ad1d3296d8c696e126c1ab1e71.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_keywords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a31871f7e856470c1fa9b8c7b81647384665b9ed1b998c1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_keywords.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea811831957fe3cea68c6ce0d9f29b9b6a816463953e61cc917f1ae36ac09d38.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa819097857e6560957e7bcc7a819c316e81951e857e62c281fe79a17aa19637.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_params.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee819cb7913b63c8846e64737bb1999c6ec52633953a69c8916f6c636e92911c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8190be857e6739917a7bc47a8594337bb1911c857e6ec3913279007e819637.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_params.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a31871f7e856470c1fa9b8c7b81647384665b9ed1b998c1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_params.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea811831957ae3cea68c6ce0c9f39b9b6a816473953e63cc917f1ae36ac09d38.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81909f857e6520957e5bcc7a8194716e31851e857e6ac281fe3f817a81963f.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_simple.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eae0943295154bcc844e6c314fb093ce7bc7c4b3a4307bc4916f3f316ed2b4ce.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c3ea1e55e856658a5c11837096e8fe17a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c36a1e55e854658b5c13837096e8fe17a.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/faa0e558855fd9e7857a1ab16a85a51d36a1e55a854e58a5c13837096e8fe17a.png" - ], - "iris.tests.test_mapping.TestMappingSubRegion.test_simple.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bd913e01d07ee07e926e87876f8196c1e0d36967393c1f181e2c3cb8b0f960d7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5b2f00392cb3496695621d34db6c92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5a2f003924b3496695e21db4db6c92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195aecf5b2f00392cb3496495e21da4db6c92.png" - ], - "iris.tests.test_mapping.TestUnmappable.test_simple.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe818d6ac17e5a958d7ab12b9d677615986e666dc4f20dea7281d98833889b22.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81b54a817eca35817ec701857e3e64943e7bb41b846f996e817e006ee1b19b.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2ff7c00a56de9023b52e4143da5d16d7ecad1b76f2094c963929c6471c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfec2d77e01a5a5ed013b4ac4521c94817d4e6d91ff63349c6d61991e3278cc.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfec2577e01b5a5ed013b4ac4521c94817d4e4d91ff63369c6d61991e3278cc.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ff95776a01e1f67801cc36f4075b81c5437668c1167c88d2676d39d6867b68.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fff941e7e01e1c2f801c878a41e5b0d85cf36e1837e2d9992c62f21769e6a4d.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fbe0623dc9879d91b41e4b449b6579e78798a49b7872d2644b8c919b39306e6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c21ccd179dc3b05e4b689b0771b48698961b7962da446e8ca5bb36716c6e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff897066b41f076f81dce1fb007da79c50633e9c40626b8d1066df9d6067969.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff897066a01f0f2f818ee1eb007ca41853e3b81c57e36a991fe2ca9725e29ed.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7e0098757103a71ce4506dc3d11e7b20d2477ec094857db895217f6a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c2d73a09b4a76c099d26f14b0e5ad0d643b0d42763e9d51378f895867c39.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe8c0173a19b4066d599946f35f0ed5d0b74729d40369d8953678e897877879.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c0567a01b096e4019daff10b464bd4da6391943678e5879f7e3103e67f1c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c0567a01b296e4019d2ff10b464bd4da6391943678e5879f7e3903e63f1c.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2777e04256f68023352f6d61da5c109dec8d19bcf089cc9d99a9c85d999.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2777e06256f68023352f6d61da5c009decad19bcf089cc9d99a9c85d989.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2777e002427e801bb4ae65a1c94813dcec999db4bbc9ccd79991f3238cc.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fe9dd77f00e1d73000cc1df707db8184427ef8d1367c88d2667d39d0866b68.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fe9d977f41e1d73000cc1df707d98184427ef8d1367c88d2667d39d0866b68.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ff9d9f7e01e1c2b001c8f8f63e1b1d81cf36e1837e259982ce2f215c9a626c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ff9d9f7e01e1c2b001c8f8f63e1b1d81cf36e1837e258982c66f215c9a6a6c.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fbe0623dc9879d91b41e4b449b6579e78798a49b7872d2644b8c919b39306e6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c21ccd179dc3b05e4b689b0771b48698961b7962da446e8ca5bb36716c6e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ffb5867f0060d4301f6d9fb007d899c50699e9c8668e78d8678d69de069969.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ffb79e7f0060d8303fcd1eb007d801c52699e18d769e2199e60ce1da5629ed.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7f90987720029f1ef458cd43811cdb60d647de609485ddb899215f62.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7f94987720009f1ef458cd43810cdb60d647de609485ddb89921df62.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd97c93734a778ce07c9f99b02731.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc8967e0098a6241f9d26e34b8e42f4d20bb4942759e9941f78f8d7867a39.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83f9c8967e009da6245f9946e25f9ed6f0940f29f40749d8853678e8d7857879.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc9d67e00909624079daef160cf4bd45a439184367ae5979f7e3119e6261c.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/83ffc9d67e00909624059daef160cf4bd45a4b9184367ae5979f7e3909e6261c.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fac1947c99184e62669ca7f65bc96ab81d97b7e248199cc7913662d94ac5a1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fac1947c99184e62669ca7f65bc96ab81d97b7c248399cc7917662d84ac5a1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fac1b47c99184e62669ca7f65bc96ab81d97b7e248199cc7913662d84acda0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b2ecc1a8b9994a16e666b5e3ce151969a5fb4ed49909653990d46b9bfc097684.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bea07c99c15eb16e9891ce50c742394a3ced6cb13390f1cc73c29f1b2d0ecd66.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ae7f1f07f3e0e0f0211b9e066e074d83926ed8f8cd3792dad1964db0d80e9b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae7f1f07f3e0e0f0311b9e066e074d839266d8e8cd379adad1964db0d80e9b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be852fc1e078c83eb30e3607672149c098d95c5b9e4636f2c1fc299d999f7e03.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a5f896d99a67b94c621deda3f69392cccd246db39018989ec4836de9ed249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a5f896d99a66b94c621deda3f69392cccd646db3901898dec4836de9cd249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a5f896d99a67b94c621ceda3f6d392cccd246db3901898dec4836de9cd249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/edfa96cb9a256b4f65466d9892d9c865693a1a9c94b39ed8484b35ad9a864c32.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a4fb19b3db04c6cd6307b98678601c738c39d71cf3866186d8616e69bd191b9e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e8b33c129649c78de3a773e578650c728e92279be12de1edc4f246b2939c3b01.png" - ], - "iris.tests.test_plot.Test1dScatter.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bbfac39d9899384a6f6694a7b613cb489c95b7b7c24a399cc5913262d84acda0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b2ecc12999994e16e666b5e3ce171969a5fb4ed49909e53990c44b9b7c09f684.png" - ], - "iris.tests.test_plot.Test1dScatter.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bea07c99c15eb16e9891ce50c742394a3ced6cb13390f1cc73c29f1b2d0ecd66.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6.png" - ], - "iris.tests.test_plot.Test1dScatter.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/af7e1f0ff1e1e0f0d918960e6c076d8bd266d868c537365a90966db0de0e1b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae7e1f0ff1e1e0f0d918960e6c076d83d266d868c537365ad0966db0de4e1b09.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/be812fc1c078c03e930e3627672369c1d8d85c5b96463662e1fc699d9b9f7e03.png" - ], - "iris.tests.test_plot.Test1dScatter.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/edf896d79a67b94c651ced23d29392cccd646d33901912fcc4836d69ed249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/edda96cb9a256b4765c26d9892dbc665693a1a9494b796c86c4b37ad92864c32.png" - ], - "iris.tests.test_plot.Test1dScatter.test_cube_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/acf939339a16c64de306318638673c738c19d71cf3866186d8636e69bd191b9e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/edb23c529649c78de38773e538650c729e92279be12de1edc4f246b2139c3b01.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_1d_positive_down.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87fef8117980c7c160078f1ffc049e7e90159a7a95419a7e910dcf1ece19ce3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7fe781b708487c360079e3bb4789869816bdb64c76b4a3cce7b4e749a6130c5.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_1d_positive_up.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ff85d47800bd9f660779d0863f49c9947f4e1e9141de38d700da28ce1d9a2b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ff85d47a00bc9f660779d8863f49c9907f4e1e9141de38d708da28ce1d9a0b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ff958b7a00b09c661761c9907fcb0d9163ce7895289a618f381bffccf97200.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_2d_positive_down.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fb946ba684e194fb901b3a0587641ad03b1ae7674e64c15a5b99c767c47e3a98.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fb946ba484e194dbc01f3665c0e4399a3f0fc2653f90c99e3f613e64c81e3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fb966ba6846194dbd01f3665c0e4399a3f1bc2653f90c99e2f613e64c01e3f81.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_2d_positive_up.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee176c7f93e093a0c50f9383815e6e156859e17e6e15e17a9be08e2d851a9b83.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebc06be1941e941ec07f941f907f6fa0950fc07e6f80c07f6b806be1c07f3f80.png" - ], - "iris.tests.test_plot.TestContour.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/cff8a55f7a15b55a7817854ad007a5e8c04f3ce8c04f3e2ac4706ab295b37a96.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaece0173d17951fbd03974a914964e8c04a72e8c1531ee1cc746bb293973ecd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeece0173c07951fbd038748914964e8c14e72e9c1531ee1cc746bb293973ecd.png" - ], - "iris.tests.test_plot.TestContour.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfc815e78018597fc019b65b425d121955e7eda854b7d6a80db7eb481b72b61.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15ab4044a269546caa5956b7e9bc0b97f2cc2d62d360b363b49.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15af4055a069546caa5954b7e9bc0f97d2cc2d62d360b362b49.png" - ], - "iris.tests.test_plot.TestContour.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe81ff780185fff800955ad4027e00d517d400855f7e0085ff7e8085ff6aed.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfe81ff780085fff800855fd4027e00d517d400855f7e0085ff7e8085ff6aed.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe817ffc00855ef0007e81d4027e80815fd56a03ff7a8085ff3aa883ff6aa5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bff817ffc00857ef0007a81d4027e80815fd56a03ff7a8085ff3aa881ff6aa5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe805ffc00857ef0007a01d4027e80815fd56a83ff7a8085ff3aaa03ff6af5.png" - ], - "iris.tests.test_plot.TestContour.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa56c3cc34e891b1c9a91c36c5a170e3c71b3e5993a784e492c49b4ecec76393.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95b199999765cd3694b06478c7396329958434c2cecb6c6d69ce1b92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95a19999876d4d3694b06c78c7396329958434c2cecb6c6d69ce3b92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95b199999e654d3694b26c78c7396329958434c2cacb6c6d69ce9392.png" - ], - "iris.tests.test_plot.TestContour.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe857f7a01a56afa05854ad015bd00d015d50a90577e80857f7ea0857f7abf.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affe815ffc008554f8007e01d0027e808557d5ea815f7ea0817f2fea817d2aff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affe805ffc008554f8007e01d0027e808557d5ea815f7ea0817f2eea817f2bff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affe8057fc00855cf8007e01d0027e808557d5ea815f7ea0817f2fea815f2bff.png" - ], - "iris.tests.test_plot.TestContour.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bff81ff7a0195fcf8019578d4027e00d550d402857c7e0185fe7a8385fe6aaf.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abff857ff8018578f8017a80d4027e00855ec42a81fe7a8185fe6a8f85fe6ab7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff8018578fc017a80d4027e00855ec42a81fe7a8185fe7a8f85fe6ab5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff801857afc017a80d4027e00855ec42a81fe7a8185fe6a8f05fe2abf.png" - ], - "iris.tests.test_plot.TestContourf.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa562ed68569d52857abd12953a8f12951f64e0d30f3ac96a4d6a696ee06a32.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea857a81957ac57e957a857a957a958ac5723b0d6ac56b833e856e606a923e90.png" - ], - "iris.tests.test_plot.TestContourf.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa5e03f957a4f80954a9e41e16e9c60970fb5b24ada634e6e93692d4ba562d8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea851f00957ac0f7957ac07f957a628d815e7b126ab13e816a953ae46a859ed3.png" - ], - "iris.tests.test_plot.TestContourf.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e954a7a81857e957e857efc00857e7e007a85c02a7e859f287a85c1fe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a81857e7a81857e7a806a85857a7a85857e7a85817e.png" - ], - "iris.tests.test_plot.TestContourf.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95a6938b6b5969193901a4fc1e594a7c69999cbce33639879526e72330e65e4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a3c7e968597b19685c9c696a7c79491c16e59691a387f6978396e68683184.png" - ], - 
"iris.tests.test_plot.TestContourf.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa85857ec45a7a81857e854a857ee56a917ec56a3a85c56a3a85c4ea7a8112fe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81817e857e7a81857a7a81957a6e81917a6caa3a85c57a3a8585fa6a8591fe.png" - ], - "iris.tests.test_plot.TestContourf.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81817ec40a7a81857e957e857ef40a857ef60b7a81c40a7b81e60f7a814aff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81817e857e7a81857e7a81817a7e81817a668f7a91857e7a81857e7a85817e.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_bounds.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eab5313f954a7b9260f39789c5ec4cd084d0c4e45aa1c5fe3a04797bb13b3b06.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_bounds.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be853f80854ac17ec0bdc2f5c17a0d09cc1fc07f5ab5e1fe3f409d7a38743e00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf813e85c07ec57ec17e9073c07e3f81856ec17a3f80c0fe3e813f84c2733e80.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_bounds.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eab5313f954a7b9260f39789c5ec4cd084d0c4e45aa1c5fe3a04797bb13b3b06.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_orography.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa17291f95e895e8645e7a95c17a6eece4b4e1333b01c07e1bb13909914b9ec1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa17291f95e895e8645e7a91c17a6ee464f4e1333b01c17e1bb1390d914b9ec1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a91957a857ac4fe268cc07f6e846e05d9373b81d17b1b6a1b41c4fa2cc4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a91917a957ac4ff248cc07f6ea466a5c03f3b81c17f1b321b01935b3fc0.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_orography.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bb07314fc4e0c6b4c31e9ee1847939a1c116c15e7b94e57e1ea9391de16e1ac3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bb07314fc6e1c6b4c31e9ee1846939a1c116c15e7b14e17e1ea9393de16e1ac3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/af0b690f96f0d2d4c25e94a194ad3da19a52c25e3f02c07f3fa52d03c16a3fcb.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea07695f95e0d2b4c09d95e0956a3da99294c2be3e85c07f3fa92b05c15e3f42.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953bfb956ac4f4649f1a05c56e6ca45a53945e6ea5c13f1b498542c13f1b41.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe857b91917a847ec0bd3f01c47e6ca43b11915a3ea4db3b1b4a84c4c03f3fc1.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be813fc0c15ac13dc1bfc27dc17e1d93c51fc43f1ea1c17a3ec138e4b1721a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be813a81c17ec57ec17e952ac07f3f808556c17e3f80c07f3e813f80c27e3f81.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.2": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea9561ef956a7b92609b922dc16e6ec6845ac47e5aa5c57e5ec04861957b1b81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe856a85957a955ac03f956ac17f3f809552c07f3e81c07e3e806e85c07e3f84.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953bfb956ac4f4649f1a05c56e6ca45a53945e6ea5c13f1b498542c13f1b41.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe857b91917a847ec0bd3f01c47e6ca43b11915a3ea4db3b1b4a84c4c03f3fc1.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/baf5347ecf0ac3f1c1f68f83850b1f83cc11c0fc7ad0c17a1be138e4b07e1a0d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b878387e978ec2f0c0f09f83878f3f81c070c0fe78d0c1763fa13856d03e3f0f.png" - ], - "iris.tests.test_plot.TestMissingCS.test_missing_cs.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fac16ee0953b911bc15e9648e56ec4e691be7bcc7a8184733ea16a90c17e930d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816ac1857e853cc17f957ac15f3e849486c8f43e81c13b3f813e91c07e3f46.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_u.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a954ac17f954a807e3f48951ac07e3e81c0ff7ea16a81c0ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a954ac17f954ac07e3f44951ac07e3e81c07f7ea16aa1c0ff3e81.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_u.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea956ab5954a954ac17e954a817f2f60950ac07f3e80c07f7a856aa5c2ff3f80.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_v.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8562b6c0773d09956a955a857a1d88845ec57e3f81c07e4ae56b21d0ff5a85.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85957a857ac17e954ac17e1fa2950bc07e3e81c07f3e807a85c17f3f81.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_v.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa9562d4c7c43d0bb57b97e0857a3f1995d284763a05c17a7b856a2dc0f45a84.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a857ac17e954ac17e9d02954ac07e3e81c07f3e857a85c2fd3f80.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_none.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8562b6c0763d09b54a955a857a3f88845ec57a3e85c07e6a616b25d0ff7a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85957a857ac17e954ac17e3fa29506c07e3e81c07f3e807a84c1ff3f81.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_none.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8562f6c0773d09b54a955a857a3f81955ac47e3e85c17e7aa16a25c0765aa1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17a1f06954ac07e3e81c07f3e817a85c0ff3f80.png" - ], - "iris.tests.test_plot.TestPcolor.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e67c9c7e1391e97a596b03a3696a13c4f63066318695ec5c9695e6c49c6a5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa.png" - ], - "iris.tests.test_plot.TestPcolor.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b3878958b38f8c7236a557a542c7868d54b877875978abc789722.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b387895ab38f8c7236a557a542c7868d54b05787197eab478972a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953f83954ac2bc956ac07e956a3509c0de61796ab57a816a856ad16ab590fb.png" - ], - "iris.tests.test_plot.TestPcolor.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e874978b978b6875978b6875978b7854950b78506855787468747ea2687597aa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7a84857a7a85857e7a813a2f7a817a85857a7a85857a7a85857a.png" - ], - "iris.tests.test_plot.TestPcolor.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95e696994b196b793b19a1ec3c191c5c6e596191e4e693269336c36391a6e3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a387e968596319697c3c19284a62c93a560c36933393a6c7e793b6c6b31cd.png" - ], - "iris.tests.test_plot.TestPcolor.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e87a197a1695a97a1695a97a17d5a97a17906785a7816685a7e86685ad687.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7e01857e7a81857e7a81e8177a816a8585fa7a85857e7a81857e.png" - ], - "iris.tests.test_plot.TestPcolor.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/af42c0bdd0ad2f52d0bd3f42d0bd7f02d0bd7f003d527f002f427ea82f42d6a8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/af42c0bdd0ad2f52d0ad2b52d0bd7f02d0bd7f002d527f002f527e0d2f52d4ad.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e80857e7a81857e7a812d557a817a85857e7a81857e7a80857e.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1594f3858a670c94e37b1cccb13e736a1d8cf17a1f94e2c119938e9463678c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea858782957a703f957a3878957a7a65957a6bc06ae76f806ad50fd06a859c50.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ad5e94a5c3b0c3f096a5695a96a53c0f711b3c0f7d1b97b46943c3e0cc416b5a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85857a857e7e81957a7a81957a6a85857acac6c1fb6aa67a81956e6a81b506.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a95e3c1f96a096a5d6a5eb40c3f0ebe0c1c0c3f07c0b3e3e96a13c1e6d5b694a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a95e381f96a096a5d6a5eb40c3f0ebf0c1e0c3f07c0a3e3e96a13c1e6d5b694a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817e81857e857a857e7a81857e6a85817b81e63a913e857e81c17e7a81956e.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bc7a1c32d3c366cdc585c39986cdc79ec792e3a6960d584939793c3438743873.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96ac78796953c4c9685383996c538e69692637263696b49693ac796693ac71b.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1f781f95e085e885e0954295e195ea95a085e87a153e7f95e06a1778557a1f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a81857e857e857e7a81857e6a81c17f95786aa77a807e81c17c7e819558.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_zy.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ba173a1795e895e8c5e8f400c1f8c1f895a8c5e87a077a5ec5e83e173e177e02.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a80857e857e857e7a81817e3e81817e857f6aa07a857e80c17f7e80c15f.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e67c9c7e1391e97a596b03a3696a13c4fe3026318695ec5c9695e6c49c6a5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b3878958b38f8c7236a557a542c7868d54b877875978abc789722.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b387895ab38f8c7236a557a542c7868d54b05787197eabc789722.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953f83954ac2fc956ac07e956a3509c0de61796ab57a816a854ad16ab590fb.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953f83954ac2bc956ac07e956a3509c0de61796ab57a916a856a916ab590fb.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e874978b978b6875978b6875978b7854950b78506855787468747ea2687597aa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7a84857a7a85857e7a813a2f7a817a85857a7a85857a7a85857a.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95e696994b196b593b19a1ec3c591c5c6e596191e4e693269336c36391a6e3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9693878969696139296c38f9bcc3474692169cb6c7339393c6cc387c78796cc.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a387e968596319697c3c19284a62c93ad60c36933393a6c7e793a6c6b31cd.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e87a197a1695a97a16d5a97a17d5a97a17806785a7816685a7e86685ad687.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e6e05857e7a81857e7a81a0577a816a8585fa7a85857e7a81857e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7e01857e7a81857e7a81a0577a816a8585fa7a85857e7a85857e.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/af42c0bdd0ad2f52d0bd3f42d0bd7f02d0bd7f002d527f002f427fa82f42d6a8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/af4280bdd0ad2f52d0ad2b52d0bd7f02d0bd7f002d527f002f527f0d2f52d4ad.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e80857e7a81857e7a8129577a817a85857e7a81857e7a80857e.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1594f3858a670c94e37b1cccb13e736a1d84f17a1d94e2c11d938e9463678e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1594f3858a670c94e37b1cccb13e736a1d8cf17a1d94e2c11993ae9463678c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea858782957a703f957a3878957a7a65957e6bc06ae56f806ad50fd06a859c50.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea858782957a703f957a3878957a7a65957a6b806ae56f846ad50fd46a859c50.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_ty.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ad5e94a5c3b0c3f096a1695a96a53c1f711b3c0f791b97b46943c3e06c436b5a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85857a857e7e81957a7a81957a6a85857acae6c1fb6aa47a81956e6a81b506.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a95e3c1f96a096a5d6a56b40c3f06be2c1c0c3f07c0b3ebe96a13c1e6d5b694a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa813e81857e857a857e7a81857e6a85817b00e63eb93e857e81c17e7a81956e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa813e81857e857a857e7a81857e6a85817b0aa63e993e857e81c17e7a81956e.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bc7a1c32d3c366cdc785c39986cdc78ec792e7a6960d584939793c3438703873.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e129c7169ed638ec9ed6387196c761c665396724612dcf0d693896929ed698c9.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96ac79796953c4c9685383996c538e69692637261696b49693ac796693ac71b.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1f781f95e085e895e0fd4295e095ea95a085e87a153e7e95e06a1778157a17.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a85857a857e857e7a81857e7a81817f95506aaf7a807e81c17c7a81957a.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba176a1795e895e8c5e87c00c1f8c1f894a8c5e87a077adec5e83e173e177a06.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a80857a857e857e7a81857e3e81817e2fc56aa07a857e80c17f7e80c17f.png" - ], - "iris.tests.test_plot.TestPlot.test_t.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fe955f6a05e5137305d9c4f443127195187e9cd5467fa3d4917b68fc007a1a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe95027e05e7007305d9c4a447127f853f069f814f2fa7d4d12b6cfc007e5a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe9c1a7e05e718f305d9d2e46312718138049e824e2fa783db2bed76b4fe00.png" - ], - "iris.tests.test_plot.TestPlot.test_t_dates.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abffd5ae2a15cdb6b10178d7d4082e57d7290906f685814277b1dc88724cfd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abffd5ae2a15c9b6a10178d7d4082c57d7290906f6c58942f7b1dc88724cfd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abffd4a02a01cc84f10078d7d4082c77d73909ded6ef816273bd9c98725cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8a7e054d83f5067bc1c1423471927ba73c8d9f864e09a1a7b358c8276f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8b7e044d81f5037bd4c14324749279a73e8d9d864f09e4a7b348dc2769.png" - ], - "iris.tests.test_plot.TestPlot.test_x.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe95297e87c74a6a059158f89c3d6ed0536597c0387836d0f87866d0697097.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1.png" - ], - "iris.tests.test_plot.TestPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff99c067e01e7166101c9c6b04396b5cd4e2f0993163de9c4fe7b79207e36a1.png" - ], - "iris.tests.test_plot.TestPlot.test_z.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" - ], - "iris.tests.test_plot.TestPlotCitation.test.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8.png" - ], - "iris.tests.test_plot.TestPlotCitation.test_axes.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8.png" - ], - "iris.tests.test_plot.TestPlotCitation.test_figure.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_non_cube_coordinate.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e81857e7a81857e7a81857e7a818576c02a7e95856a7e81c17a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e3e85857e7a81857e7a81857e7a817e81780b7a81c56a7a81857e.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe8142f5c17ebd2cc16eb548954a9542916a347a915e60bd4afd68793f916296.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853f10956ac5e1957a854e957a203e955e6aa76ae17aa16a856aaf6ab19e12.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853f10957a85e1957a854e957a203e955e6aa76ae17aa16a856a8f6ab1de12.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8542b7b503b548857abd08857abd09945eed6b91d968c161b972d76aa462b5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8542b7b503b548857abd08857abd09945eed6a91d96ac163b972d36aa462b5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853a85857a857a957a857a957ad05a857b3e946a606b917a816f647a853af4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853a85957a857a957a857a957ac05ac56b3ac46ae16b817a816f647a853af4.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bf88f457a03b5307e16b561f007b53ed067217ac1786afec0f570bf8178681a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bf98f057a03b5307e16b561f007b53ad067217ac1786afec0f570bf8178685a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9bc219530b696a56694c2852a95656b7b81986acdc0e516adad186eda.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafd86c9f8219430fe96a56684c3852e95656b7b85b86acdc0e5162da5186eda.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe8f367e05952afe05a50b980ded4bd05d69c2c1fb71c1c06272f4d0a06af4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7bd05952fbd0f950f914fcd48c47860f3e1b9329094266e345a850f6c.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7fd05952dbd0f950f914fcd40c47868f3e1b9329094266e345a850f6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aff21eb6bd04952cbc0f950f914fedc1c0f961f3e1f9329084266e345a850f6c.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/aa953d0f85fab50fd0f2956a7a1785fafa176877d00f68f1d02c60f2f008d0f0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebeaa5419e94b5019e97950d685395bee05361fad05560fad01570fef001dabe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebeaa5419e95b5419e97950d6853953ee053617ad05560fad01570fef001dabe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfaa56f96a1856cd681a56ee8162d52e8467e12c50c7e8095ad7e0095ad03ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b5699556854e9456854ed05625f9c0a92bfdc0a90afd81f97e00857e6af6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b5699556854e9456854ed05625f9d0a92bfdc0a90afd81f97e00855e7ab6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b5299d56854e9156856ed05625fdc0292bfdc0a90afd85b97e00857e6ad6.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfaaf439e87b5019687b5019687b56ac05561fae07103fe6079687a607178f8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa2d4b968795059e87970f6854697ae055697ac08561fad041d7aef001d6ae.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eb7a3e0c978187a4950190bc6856687a607e687bc0fcc1e394acfc0197fc2bfb.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaf73e0d9503852c950395ac9528c1fad16cc0f2d1ec6af2c0ec6a536a1797f3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaf73e0c9402952c950195acd528c1fac1ecc1f3c1ec63f3c0ec6a536a179ff2.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/aeb8b5095a87cd60386592d9ec97ad6dd23ca4f6d0797827f0096216c1f878e6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affa950ddb13c03634359ad8a4c80f26911f26f3c06e0ff3f4007b4285fd6e72.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/afea950ddb13c03e34359ad8a4c86f24913f2693807e3ff1f4087b4285fd28f2.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fea97194f07c9c830d79169ce16269f91097af6c47861f6d0796076d0797a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fee970b4f07c9c930d79129ce16269f91097af6c4f861f4d0786076d0797a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/afea97094f07c9c870d79129ce16269f91096af6c4f861f6c07960f6d0797a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/afee9632de05c9d9f180d168c454a53e931b3e84956a3b8c85d94ce703ff7284.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85603f956a9741951e9d83c1fa8d2fd0a55af0d25f345ae5f062c72d68612d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853f00957ac07c957ac0be951a69f3c47c7a5f3a6127816b953e646b813761.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a69cc96ad92e193c9963385929e1cc3819acde6d965ce6e666b30386e65b1.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffcc65767039740bc069d9ad00b8dadd03f52f181dd347a847a62ff81e8626c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffcc65777039740bc069d9ad00b8dadd03d52f181dd707a847a62ff81e8626c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebffca44f502b36498309c9b940999add1bb62bba784374acc5a6a246acc6b65.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfeca44f102b3649c309c9b940d19add1bb63b3a7843e4acc5a6aa56acc6b64.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5649c434ac92e5d9c9361b95b39c38c3835a5ec6d966ced34c633099ace5a5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a6b6c96a597a591c9949b94b61b69c7926b5bccce66646b3869b831a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e6b6c86a595a791c9349b94b71b69c7926b5bccca66646b1869b831a52ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a6b6c86a595a791c9349b94b73b69c7926b5bccca66646b3869b031a52ca6.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2dd2d09295c3c0c7d13c1bc6d23d2c696de0e53c3ac393daf6d205c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c3c0c7d13c1bc6d23d2c696ce0e53c3ac393dbf6d205c2c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2f92d09295c3d0c7d13c1bc6d23d2c696cf0e53c3ac2b3d9f6d201c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3e2f97a1c19996a1c8f26c1e360f684a3c2c6913dca497b9d38097a903ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3e3f96a1c3e197a169f1785e3b0e68523e1c398bc58687b1d86096e1039f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea153e0395aac1f895eac0f8940e69e56a743e5f7a432787691ef860c3c1938f.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9686d8c9696924797879e3b86929e58696d69cc6869659379626133398d9ccd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e961658f961e92469e1e1c7966f36cd86165618c70e166b39b9698719e1e9ec8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e1a530e29e5ecf199a5acd8f64f1326161a530e265999cd29e52cf199a5e6669.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96930749696cb9d9697cdc39692671b696c306969eb3c76697319942a0d8699.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf803f00c05fc4bfc07ec15dc05fd8bbc07cc96c333a32113bd02dd27ced3ec0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be813ea0c17ec55ac17ed23dc07e295ac57e3b653f803f813e816e853e81b542.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95956a95626993941a6a2d956e6ed6845a6e65c4bec7b64a9594686ea19578.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85856e857e4893957a7aa1956a7b81954b3b817a856fd46a85846e6e85857e.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.2": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8fe82f047c018c83bc01bc5af01fd1bcd15a327c847860fdc57a69beb0be68bd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe82f047c018c83bc01bc5af01fd1bcd15a32fd847860fdc57269beb0be689d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bedcf25bc03a4929c103a5bf03fdbbc81cb364d86e46da70f86899b3a0f6cc0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/cbedcd25bc02a4929c103a5bf03fdbbc81cb364d84e46da70f86899b3a0f6ec1.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/cee8953a7a15856978579696d03d672cc49a6e5a842d3d2cc0b66bd1c2ea39f1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aee1f93a63168569b852d697913d632485ca2e43952d3bbcc2b66bd1426b3c71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aee1793a6b168569b852d697913c622cc5ca2e4b952d3bb4c2b66bd1426b3c71.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee953f0591ea3f07914a95fa7e07d1fa68156a15d07c6a3dd038c0fef000d0fa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae953f0591ea3f07914a95fa7e07d1fa68156a15d07c6a7dd068c0fef000d0fa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec11ab5c1be857ac13e7ae53c422d423e017a85b542fc00c1fefe0091fe03ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec13a81c13ec56ac13e5afdd11e256a3e412afd3e4002ff2ee0fe0035fa817a.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a973d96a56953968769439685a54ae05117eae0511fba60513bba69717aba.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a952d96a56953968769439685a54ae85197eae0511fba60513bba69717aba.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a96ac97a16c5897a1791e95a53b0b913c6953687c4ec3685cc6c36e7c87c3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1595ec95ea681d95ea7b0595ab3b13950d7a536a1cc6f26a0cc4f26e0c85f2.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f9789b388786678686966c9093879ce592c79bc94d19929b6939cf66316c672c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9a53a59961ec5a62c691a587b9662e1c0e1e53e9e0e9b873ec15a7161bc642f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f9789b388786678686966c9093879ce592c79bc94d19929b6939cf66316c672c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9a53a59961ec5a62c691a587b9662e1c0e1e53e9e0e9b873ec15a7161bc642f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce.png" - ], - 
"iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_default.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f9789b388786678686966c9093879ce592c79bc94d19929b6939cf66316c672c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81948e857e4971907ea72e95fa66b2952e4ead6d429b527ac7a5286e981836.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa85978e837e68f094d3673089626ad792073985659a9b1a7a15b52869f19f56.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea159694856a6b5096afa53a36941da1e4f5c369cd1ae6d69b6a1c80625af2f6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95969c874a63d39ca3ad2a231cdbc9c4973631cd6336c633182cbc61c3d3f2.png" - ], - "iris.tests.test_plot.TestPlotOtherCoordSystems.test_plot_tmerc.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e63399cd99cd64b29999335965369b262649c98c9b3966c6998d3319ccd69333.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e665326d999ecc9b3319b3246666cce69b496cccccc9669923193336666699a6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e665326d999acc9b3319b3246666cce69b496cccccc966996719333666669986.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e665326d999ecc92b399b32466269326b369cccccccd64d96199631364f33333.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_t.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffb5d67fd4e5962211d9c6a443da77d5389c8ed346d923d011d968dc00da48.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82ffb5d67fdde5962211d9c6a441da77d5389c8cd346d927d011d968dc00da48.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fabd867fd5e5822201d9c6a4539a77953d8cbf834f99e7d051996cdc00da48.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffb59a7f00e59a2205d9d6e4619a74d9388c8e884e8da799d30b6dddb47e00.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_t_dates.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd5ae7f51efb6200378d7d4082c17d7280906d6e58962db31d800da6cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd4ae7f55efbe200178d7d4082c17d7280906d6e58962df319800da6cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd4827f51ef94200078d7c4082c57d739095ed6ed8962db759808da6cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fd958a7e006f9ba0077bc5c9462c759873dd3c8d8f826699a187b358c82f67.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe958b7e046f89a0033bd4d9632c74d8799d3e8d8d826789e487b348dc2f69.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_x.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffb5097e84c54a621799d8601d9966d213cd67c039d876d078d866d869d8f7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffbd097e84c54a621799d8601d9966d253cc27c039d876d078d866d869d8f7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3f9bc067e01c6166009c9c6b5439ee5cd4e0d2993361de9ccf65b79887636a9.png" 
- ], - "iris.tests.test_plot.TestQuickplotPlot.test_z.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" - ], - "iris.tests.test_plot.TestSimple.test_bounds.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a85954a957ac17e954ac17a9c3e956ac07e3e80c07f3e857aa5c27d3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a85954a957ac17e954ac17a9d22956ac0fe3e81c07f3e857aa5c27d3f80.png" - ], - "iris.tests.test_plot.TestSimple.test_points.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e1ca2950bc07e3e80c07f3e807a85c1ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e0da2954bc07e3e81c07f3a806a85c1ff3f81.png" - ], - "iris.tests.test_plot.TestSymbols.test_cloud_cover.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95a330c96a5ccf2695a330c96a5ccf2695a330c96b5ccf3694a330c96b5ccf3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eb52916494ad6e1b6b5291e494ad6e1b6b5291e494ad6e1b6b5291e494ad6e1b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eb5291e494ad6e136b5291ec94ad6e136b5291ec94ad6e136b5291ec94ad6e13.png" - ], - "iris.tests.test_quickplot.TestLabels.test_alignment.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa95350f952ad2f0c1f66ac1c55a4af4e550a52b3e05905e1e419e6f937e3b21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa95350f952ad3f0c1f66a81e55a4af4e550a52b3e05905e1e419e6f937e1b21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be8137f4954ac03fc0ff3e81d03f496a6d00b4af3ea0c07f6fa232c0db7f2d00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be813fe0954ac07fc0ff3e81c03fc97a6d0094af3f80c17f36a53240d97f2d82.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be8137e0954ac05fc0ff3e81c07fc97a6d0094af3fa0c17f36a53244d97e2da0.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contour.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fd956a7a01a5ee321fc96666919b6ec15fdca593600d2586785a259dfa5a01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fd956a7a01a5ee3217c9e66691996ec15fdca593680d2586785a259dfa5a01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd95da7a01654a3217c962e4819a56c96f3c8593624da584da3b658db662db.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fd95ea6a11258c3217c966e4019a56c96f3c859b62492584fe7a699db46adb.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contour.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa12bc1954ef43fc0bf9f02854a4ee48548c17a5ab5c17e7a0d7875a17e3a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802f85c17fc17fc07eb42ac07f3f929130c07e3f80c07f7aa02e85c07f3e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802e85c07fc17fc07eb42ac17f3f829161c06e3f81c07f7ba02e85c07f3e81.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe812f88957a955a857a9257c17f7aa5c03dc0bf5a85c07e7f402d40a57a3f01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a95957a957ac0fe1e8bc07f7f806e01c07f3f80c07f3fa23f00c07f3d00.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/be816a85957a955ac0ff1e8bc07f7f806e01c07f3f80c07f3fa23f80c07f3d00.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa12bc1954ef43fc0bf9f02854a4ee48548c17a5ab5c17e7a0d7875a17e3a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802f85c17fc17fc07eb42ac07f3f929130c07e3f80c07f7aa02e85c07f3e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802e85c07fc17fc07eb42ac17f3f829161c06e3f81c07f7ba02e85c07f3e81.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa852f81955ac532c0bf9e89c57edae69357e13f4ea0c05a3f8561a4935a3e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a95907ae508c17e955ac07f3fa0945bc07f3f80c07f3aa36f01c0ff3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816ab5907ae508c17e955ac07f3fa0945ac07f3f80c07f3aa32f81c0ff3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf_nameless.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa52ec1955ac536c0bf9e09c57edae69357e13f4e80c0da2f81618493da3f01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816af5907ee508c17e955ac03f3f809419c07f3f80c07f3a8b6f81c0ff3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816ab5907ee508c17e975ac07f3fa09459c07f3f80c07f3a812f81c0ff3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4fcee19a6e9b64cb609925cd25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6.png" - ], - "iris.tests.test_quickplot.TestLabels.test_map.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4ecef19a6e9b64cb609925cd25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a793c9349b94b69969c396c95bcce69a64d938c9b039a58ca6.png" - ], - "iris.tests.test_quickplot.TestLabels.test_pcolor.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bb423d4e94a5c6b9c15adaadc1fb6a469c8de43a3e07904e5f016b57984e1ea1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16abfc05ab500956e974ac13f3da0925ac07f3fa1c07e3fa12da1c25e3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_pcolormesh.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bb433d4e94a4c6b9c15adaadc1fb6a469c8de43a3e07904e5f016b57984e1ea1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f3f81c07e3fa12da1c27e3f80.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/eea16abfc05ab500956e974ac13f3da0925ac07f3fa1c07e3fa12da1c25e3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f3f80c07e3fa12da1c27f3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16abfc05ab500956e974ac13f3da0925ac07f3fa1c07e3fa12da1c25e3f80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85857a955ae17e957ec57e7a81855fc17e3a81c57e1a813a85c57a1a05.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a956ac17f950a807e3f4c951ac07e3f81c0ff3ea16aa1c0bd3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a954ac17f950ac07e3f44951ac07e3f81c0ff3ea16aa1c0ff3e81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e1ca2950bc07e3e80c07f3e807a85c1ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e0da2954bc07e3e81c07f3a806a85c1ff3f81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eafdeec9f729943032168d66d4cb896e9567497b81304aedc96514ad8d18669a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaf9a6c9f728943032168d66d4cb8d2e9567497b81304aedc9e51e2d9d186ada.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fb4b967f00950eb00f9d0f900fcd62dc7868f2c1bb3a909c266e34daa52f6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fa1e967f00950eb00f9d0f914fcdc2d560c9f3c1fb3a9084266e34daa52f6c.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b549f756854ea0168d6ed556896dd8e909ed88290afdd9e97e008d6e2296.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b529f756850ea0169566d1568d6dd86909ed88290afd9ded7e008d666ad6.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/aad73e0df78085ac840395ac9428d9fad56cd8f2906c48f2d0ec7a536a1737f3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aad73e0cf710952c840195acd528c1e2d1ecc9f3c1ec49f3c1ec6a536a1737f3.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a6ffb5097e84cde2224598d1649f8d6cd2388c76d0799867d009da76c9f8d866.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6bfb5097f84cde2224599d1649f8d6cd2388c76d0799867d009da76c1f8d866.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fbb50cfbd0c036203598dce4c88d26d32f8cf3886e1df3dc047b4289ec6e72.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fb958dfb50c03e203598dca4c9cd26933f9cb3886e1df1dc047b4289ec2e72.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/a6fb958dff50c03e203598dca4c9cd26933f9cf3886e1de1dc047b4289ec2672.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ff978b7f00c9c830d7992166179e969509d866c478d964d079c876d869da26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ff97837f00c9c830d79921661f9e9695099876c478d964c079c876d879da26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a2ffb6127f0dc9993085d960c6748d3e9b121ca49d6a1b048df34ce789ff7205.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a9ff16eb740954a9e05855a19a3c0fbc13e1ea5c07d5ad0cb58e45e3c35.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95957a957ac07e954ac17e3e87950bc07f3ea4c27d3e833ac1c1e03f80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a565b69e1a9a42917e1a19c17b3a619e59c47b3a25c53e3b8430e5c57a3e85.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a761a79a589e58c07d1e48c07c3f819e41c07f3d84c17e3fa62585c0fe3f83.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/afffe6d67700958636179d92e019992dd039daf5817d987a807a48e499684a6d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aeffe6d67780958636179d92e019892dd139daf5815d987a807a48e699684a6d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaff6ad4f74ab16490109c9b942999add1b74bb785a41d4acd526a254acc6365.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aaffead4f7cab16490109c9b946d99add1b34bb385a41c4acd526a254acc6365.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4fcee19a6e9b64cb609925cd25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c2d1c3d33c1bc2d67d2c696ce0653c3ac2b1d976da05c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c2d1c3d33c1bc2d27d2c696ce0e53c3ad2b1d976da01c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3e2f97a1c19996a1c8f26d1e3a0f684a3c2c6913dc2497b9db8095e502ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3c1f97a1c3e197a1c9f37c5e390668521e0c390bdd8685b1d86096e5279f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea153f0395eac1f895eac9fa941c79e56a741e4f68430f876916f860c9c1938d.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e968658e969692c797879e3b86929e58696d49cd6869c9a37962c923990d9c6d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e1658e961e92569e9e3c7966d36c586165698c70e1ce739b3698619e1e984c.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e1a530e29e5ecf199a5acd8f64f1326161a538e665a198d29e52cb1d9a5e6669.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96930749696cf9d9697cdc39692670b696c386969eb3866696399a41a0d8e99.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf813f80c156c05dc0fec29dc17f1a6dd05fc0ff1aa1c57e3b243b20375a1e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a81d17ec57ac07e952ac07f3aa0955ec17e3f80c07f3f803f80c0bf3f81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95629d956a996069939e9bc07f7aad856cc47e5e81857a1e254a35c1be1b81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85957a957ac03f957ac07f3ba1954ac07e3e81c07f3ea47a85c07e3e80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f867f008d8220179852f01fd9bed1789a6c847cc877c46ac972987ec8fd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f067f008d8220179852f01fd9bed1789a6c847cc877c468c9f6987ec8fd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f067f008d8220179c52f01fd9bed1789a6c847cc877c560c976987ec8fd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded05fe11a492b000985af07fdbb4d1e3366d8c644da79fa68993180f6ec1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded04ff11a492b000985af07fdbb4d1eb366d8c644da79fa68993180f6e81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a2f9b5ba7600a56962df9e96f01dc926c498cc46847f9d6cd0244bf19a6b19f1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a2f9b5ba7600856962df9e96f01dcd26c498cc46847f9d6cd0244bf19a6b1975.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aef9f93a770085e9205fd696d13c4b2485ca1a43952f1934daa66bd1ca6b3c71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aef9793a770085e9205fd696d03ccb2485ca1e43952f1934daa66bd1ca6b3c71.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ae953f87d5e82d86801f91ee6e1591fe7e117876c07d6877d068d878d800d07a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae953f87d5e82d87801b91ee6e1599fe7e117874c07d6877d068d878d800d07a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec1329dc5be85dac01d58d73e419d423e41daa59822dc00c5fefe0091fe03ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec13e81c5bec55ac03dd896d17e8d6a1e410af7380008ff1de6fe0099ea237b.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a952d96856943969f694696858d4ee0519d6ee07f9b6a78619b2a79711a2a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a952d96856943969f694696858d4ae0519d6ee07f996a78719b2a79711a3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e96ac97a168d897a5791695a19927913c3953687ecce3687c86e3487cc6c3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1595ac95e8689d95fb7b0595291963916f3b73487fccf2680484f2486ec6f0.png" - ], - 
"iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_not_reference_time_units.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/82faa1977fdf89976200ddf6e000d9e7f75f9866d560dae4dc00d966dc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82b8a1977fdf89876200dde6e000d9e7f77f9866d560dfe4dc00d966fc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82f8a1977fdf89876200ddf6e000d9e7f77f9866d560dee4dc00d966dc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82f8a1977fdf89876200dde6e000d9e7f77f9866d560dfe4dc00dd64dc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82faa19e7f51898c6001dd86845fd9a2dd7f996281ee19f389ef03ffdc007e00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82f8a19e7f51888c6001dda6855fd9a2dd7f986281ee19f389ff03ffdc007e00.png" - ], - "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_reference_time_units.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe81987fd777ffe0002addd4002805dda8de65dde9d4625bfddc209841de20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe81987fdf77ffe0002a9dd4002805ddaade65d9a9d5625bfddc209841de20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe81987fdf77ffe0002addd4002805dd28df67d9a9d4625bfddc209841de20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fa80997f547799a0037a00d52f0956ddaf9f7e98a1816e09f5d8260bfffe00.png" - ] + "gallery_tests.test_plot_COP_1d.0": "aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33", + "gallery_tests.test_plot_COP_maps.0": "ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94696935", + "gallery_tests.test_plot_SOI_filtering.0": "fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4", + "gallery_tests.test_plot_TEC.0": "e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c", + "gallery_tests.test_plot_anomaly_log_colouring.0": "ec4464e384a39b13931a9b1c85696da968d5e6e63e26847bdbd399938d3c5a4c", + "gallery_tests.test_plot_atlantic_profiles.0": "97c160f462a88f07203ebc77a1e36707e61f4e38d8f3d08a910597fc877cec58", + "gallery_tests.test_plot_atlantic_profiles.1": "eeea64dd6ea8cd99991d1322b3741e2684571cd89995b3131f32a4765ee2a1cc", + "gallery_tests.test_plot_coriolis.0": "e68665de9a699659c1fe99a5896965966996c46e3e19c1da3a652669c51e1a26", + "gallery_tests.test_plot_cross_section.0": "ea91b17b9562e4d1609f5a05856e4ca45a52957e5ea5f13b1bca9dc0b17b1ac1", + "gallery_tests.test_plot_cross_section.1": "ea9521fb956a394068931e93e07e4aa5856cc47e4a91957b1ba55bb5b17a3b81", + "gallery_tests.test_plot_custom_aggregation.0": "ee816f81917e907eb03ec73f856f7ac198d070186e90811f1be33ee1a57a6e18", + "gallery_tests.test_plot_custom_file_loading.0": "fa81cb47845e34bc932797436cccc8343f11359b73523746c48c72d9d9b34da5", + "gallery_tests.test_plot_deriving_phenomena.0": "ec97681793689768943c97e8926669d186e8c33f6c99c32e6b936c83d33e2c98", + "gallery_tests.test_plot_global_map.0": "fb997b958466846ed13e87467a997a898d66d17e2cc9906684696f99d3162e81", + "gallery_tests.test_plot_hovmoller.0": "eeb46cb4934b934bc07e974bc14b38949943c0fe3e94c17f6ea46cb4c07b3f00", + "gallery_tests.test_plot_inset.0": "ebff6992b50096ad9267dac4d640949294924cdbc95d4b699d29952dcda46ed4", + "gallery_tests.test_plot_lagged_ensemble.0": "bbbb31b1c44e64e4b1579b5b917133cecc61f146c414668eb1119b1bb197ce34", + "gallery_tests.test_plot_lagged_ensemble.1": "aafec5e9e5e03e099a07e0f86542db879438261ec3b13ce78d8dc65a92d83d89", + 
"gallery_tests.test_plot_lineplot_with_legend.0": "eafd9e12a5a061e9925ec716de489e9685078ec981b229e70ddb79219cc3768d", + "gallery_tests.test_plot_load_nemo.0": "a3ff34e87f0049496d17c4d9c04fc225d256971392db9f1696df0f16cec00736", + "gallery_tests.test_plot_orca_projection.0": "bb11721a87cce5e4cce79e81d19b3b5e1e1cd3783168e07835853485e65e2e1e", + "gallery_tests.test_plot_orca_projection.1": "e58661969e799659c1f719a6c867359a1996c0773649c09c3e612679c07b3f66", + "gallery_tests.test_plot_orca_projection.2": "a58660ce9e739b31c93d1c89c8df33863783e23b3f11c07f2664366cc8ee3cc1", + "gallery_tests.test_plot_orca_projection.3": "be817a8784dea56cec79817a919e338437a5c1e73fa16c726c4a3e816a1c6b1c", + "gallery_tests.test_plot_polar_stereo.0": "ba1e615ec7e097ad961f9cb190f038e091c2c1e73f07c11f6f386b3cc1793e01", + "gallery_tests.test_plot_polynomial_fit.0": "aeffcb34d244348be5a2c96c3a4fc6d0c4b69f2d87294ccb9f1a125684cd7c11", + "gallery_tests.test_plot_projections_and_annotations.0": "fa854f19851a30e4cc76cd0bb0f932dca7c665b0c93ccb4b4ed19e9c3721b5c8", + "gallery_tests.test_plot_projections_and_annotations.1": "e3856d999c389662734331afcd2d5a7184dba592b9b69b64d26dc29954b185b2", + "gallery_tests.test_plot_rotated_pole_mapping.0": "ee46607e97a19781c0de1f81d0bb3e241f20c16f3fc0c1fe3d263d33d06f3e80", + "gallery_tests.test_plot_rotated_pole_mapping.1": "ea57685f95a886a1c0de9da090be3e2697e1c0ff3f00c17e6b266c17c07f3f00", + "gallery_tests.test_plot_rotated_pole_mapping.2": "ea57685f95a886a1c0de9da090be3e2497e1c0ff3f01c17e6b366c17c07b3f00", + "gallery_tests.test_plot_rotated_pole_mapping.3": "fa8172c6857ecd38cb3392ce36c564311931d85ec64e9787719a39993c316e66", + "gallery_tests.test_plot_wind_barbs.0": "e9e161e996169316c1fe9e96c29e36739e13c07c3d61c07f39813929c07f3f01", + "gallery_tests.test_plot_wind_speed.0": "e9e960e996169306c1fe9e96c29e36739e03c06c3d61c07f3da139e1c07f3f01", + "gallery_tests.test_plot_wind_speed.1": "e9e960e996169306c1ee9f96c29e36739653c06c3d61c07f39a139e1c07f3f01", + "gallery_tests.test_plot_zonal_means.0": "b45b3071c9a4c9a6c69c363cc327cbb3cb9634d8c9e63cf336738c6634d8c384", + "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.0": "fe81c17e817e3e81817e3e81857e7a817e81c17e7e81c17e7a81817e817e8c2e", + "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.1": "fe81857e817e7a85817e7a81857e7e817e81917a7e81817e7a81817e817e843e", + "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.2": "be81817ec17e7a81c17e7e81857e3e803e81817a3e81c17e7a81c17ec97e2c2f", + "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_northpolarstereo.0": "e59661969e699659c0f719a6c967339a1992c07f3649c09c3f612669c07b3f66", + "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_platecarree.0": "ee856299954a1da699b6915ec25b6e419729c42c3f84bd9fa6d262d1d1dac076", + "iris.tests.integration.plot.test_plot_2d_coords.Test2dContour.test_2d_coords_contour.0": "b4b2643ecb05cb43b0f23d80c53c4e1d3e5990eb1f81c19f2f983cb1c4ff3e42", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon.0": "eb036726c47c9273918e6e2c6f216336787590eb969a165890ee6c676925b3b3", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon_on_polar_map.0": "e66d673c999031cd6667663398dc332c676364e798959336636660d933998666", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_rotated_latlon.0": "eba037a4c479c273b2963f2c6f6126966865d86f969e33c9b1706c26692793b0", + 
"iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_1d_coords.0": "a7ac334934d2e65c72596325b343338cb41c92d9c5b36f65330d379692ca6d6c", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_2d_coords.0": "a7acb36134d2e676627963259343330cb43e92d9c5336e67330d379292ca6d6c", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon.0": "fb8d4f21c472b27e919d2e216f216b3178e69c7e961ab39a84696c616d245b94", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon_on_polar_map.0": "e66c6619999933666666c6d99999336663646d9999c1332667b60cf964d8672c", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_rotated_latlon.0": "eba925a5c476d25a95a56b876f3826246a449c6b96a3731ab13f6c656a5cb48a", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_1d_coords.0": "a7ac24947259f3493697632df45926b6e126c4f392593b4937266f26ccf032d8", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_2d_coords.0": "afac26367251d3493617632df45c26a6e126c6f392593b4937266f26ccf232d0", + "iris.tests.test_analysis.TestProject.test_cartopy_projection.0": "9e1952c9c165b4fc668a9d47c1461d7a60fb2e853eb426bd62fd229c9f04c16d", + "iris.tests.test_mapping.TestBasic.test_contourf.0": "e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c", + "iris.tests.test_mapping.TestBasic.test_pcolor.0": "e97a347c96858b8d9685c9c39696c393966c634969ce3c64697a3864697b3c9c", + "iris.tests.test_mapping.TestBasic.test_unmappable.0": "ea853e48957ac1df957ac8be852bc1b1944e7a9a78e02f4c6a253e6c7a912dc2", + "iris.tests.test_mapping.TestBoundedCube.test_grid.0": "fa81857e857e7a81857e7a817a81817e7a81857e857e7a81857e7a817a81857e", + "iris.tests.test_mapping.TestBoundedCube.test_pcolormesh.0": "fa81c17e857e1ea1857e634a7a81cd257e8484da857e3b29817e68f47a81c799", + "iris.tests.test_mapping.TestLimitedAreaCube.test_grid.0": "fa81857e857e7a81857e7a817a817a817a81817e7a81857e857e857e857e7a81", + "iris.tests.test_mapping.TestLimitedAreaCube.test_outline.0": "fa81857e857e3e81857e7a857a817e817a81857a7a81817e857e857a857e7a81", + "iris.tests.test_mapping.TestLimitedAreaCube.test_pcolormesh.0": "ea813949957ec4b7917e3f60266978d97a9562376e81954a914ec6cc957a0f98", + "iris.tests.test_mapping.TestLimitedAreaCube.test_scatter.0": "ea05bd3e91eac2d984983d346b2473477acf69ad1d3296d8c696e126c1ab1a71", + "iris.tests.test_mapping.TestLowLevel.test_keywords.0": "be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3", + "iris.tests.test_mapping.TestLowLevel.test_keywords.1": "fa819897857e6530957e7bcc7a819c316ea1951e857e62c2857e79a17a819633", + "iris.tests.test_mapping.TestLowLevel.test_params.0": "fa8190be857e6739913a7bc47a8594337bb1911c857e6ec3913279807e819637", + "iris.tests.test_mapping.TestLowLevel.test_params.1": "be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3", + "iris.tests.test_mapping.TestLowLevel.test_params.2": "fa81909f857e6520957e7acc7a8194716e31851e857e6ac281fe3ba17a81963f", + "iris.tests.test_mapping.TestLowLevel.test_simple.0": "faa0e558855f9de7857a1ab16a85a51d36a1e55a854e58a5c13837096e8fe17a", + "iris.tests.test_mapping.TestMappingSubRegion.test_simple.0": "b9913d90c66eca6ec66ec2f3689195aecf5b2f00392cb3496495e21da4db6c92", + "iris.tests.test_mapping.TestUnmappable.test_simple.0": "fa81b54a817eca37817ec701857e3e64943e7bb41b806f996e817e006ee1b19b", + "iris.tests.test_plot.Test1dFillBetween.test_coord_coord.0": "f31432798cebcd87723835b4a5c5c2dbcf139c6c8cf4730bf3c36d801e380378", + 
"iris.tests.test_plot.Test1dFillBetween.test_coord_cube.0": "ea17352b92f0cbd42d6c8d25e59d36dc3a538d2bb2e42d26c6d2c2c8e4a1ce99", + "iris.tests.test_plot.Test1dFillBetween.test_cube_coord.0": "aff8e44af2019b3d3d03e0d1865e272cc1643de292db4b98c53c7ce5b0c37b2c", + "iris.tests.test_plot.Test1dFillBetween.test_cube_cube.0": "ea1761f695a09c0b70cc938d334b4e4f4c3671f2cd8b7996973c2c68e1c39e26", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord.0": "8bfec2577e01a5a5ed013b4ac4521c94817d4e6d91ff63369c6d61991e3278cc", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord.0": "8fff941e7e01e1c2f801c878a41e5b0d85cf36e1837e2d9992c62f21769e6a4d", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord_map.0": "bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_cube.0": "8ff897066a01f0f2f818ee1eb007ca41853e3b81c57e36a991fe2ca9725e29ed", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube.0": "8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_coord.0": "8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_cube.0": "8ff8c0567a01b296e4019d2ff10b464bd4da6391943678e5879f7e3903e63f1c", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_coord_coord.0": "f314b2798ce3cd87723835a4a5c5c2dbcf139c6c8cf4730bd3c36d801c3c6378", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_coord_cube.0": "ea17352bd2f0cbd4256c8da5e59c36dc1a538d2b92e41d26ced2c2c8eca1ce99", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_cube_coord.0": "a3ffe44af6009b3d2907c8f1f6588f2cc96619e290fb4b88cd2c3ce590e3770c", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_cube_cube.0": "ea17e1f695a09c0b60cc938d334b4e4f4c3671f2cd8b7996973c2c69e1c31e26", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord.0": "83fec2777e002427e801bb4ae65a1c94813dcec999db4bbc9ccd79991f3238cc", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord.0": "83ff9d9f7e01e1c2b001c8f8f63e1b1d81cf36e1837e258982ce6f215c9a626c", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord_map.0": "bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_cube.0": "87ffb79e7f0060d8303fcd1eb007d801c52699e18d769e2199e60ce1da5629ed", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_coord.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_cube.0": "83ffc9d67e00909624059daef160cf4bd45a4b9184367ae5979f7e3909e6261c", + "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord.0": "b2ecc1a8b9994a16e666b5e3ce151969a5fb4ed49909653990d46b9bfc097684", + "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord_map.0": "bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6", + "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_cube.0": "be852fc1e078c83eb30e3607672149c098d95c5b9e4636f2c1fc299d999f7e03", + "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_coord.0": "edfa96cb9a256b4f65466d9892d9c865693a1a9c94b39ed8484b35ad9a864c32", + "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_cube.0": "e8b33c129649c78de3a773e578650c728e92279be12de1edc4f246b2939c3b01", + 
"iris.tests.test_plot.Test1dScatter.test_coord_coord.0": "b2ecc12999994e16e666b5e3ce171969a5fb4ed49909e53990c44b9b7c09f684", + "iris.tests.test_plot.Test1dScatter.test_coord_coord_map.0": "bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6", + "iris.tests.test_plot.Test1dScatter.test_coord_cube.0": "be812fc1c078c03e930e3627672369c1d8d85c5b96463662e1fc699d9b9f7e03", + "iris.tests.test_plot.Test1dScatter.test_cube_coord.0": "edda96cb9a256b4765c26d9892dbc665693a1a9494b796c86c4b37ad92864c32", + "iris.tests.test_plot.Test1dScatter.test_cube_cube.0": "edb23c529649c78de38773e538650c729e92279be12de1edc4f246b2139c3b01", + "iris.tests.test_plot.Test2dPoints.test_circular_changes.0": "fa81c57a857e93bd9b193e436ec4ccb03b01c14a857e3e34911f3b816e81c57b", + "iris.tests.test_plot.TestAttributePositive.test_1d_positive_down.0": "a7fe781b708487c360079e3bb4789869816bdb64c76b4a3cce7b4e749a6130c5", + "iris.tests.test_plot.TestAttributePositive.test_1d_positive_up.0": "a7ff958b7a00b09c6617e1c1907fcb0d9163ce7895289a618f381bffccf97200", + "iris.tests.test_plot.TestAttributePositive.test_2d_positive_down.0": "fb966ba6846194dbd01f3665c0e4399a3f1bc2653f90c99e2f613e64c01e3f81", + "iris.tests.test_plot.TestAttributePositive.test_2d_positive_up.0": "ebc06be1941e941ec07f941f907f6fa0950fc07e6f80c07f6b806be1c07f3f80", + "iris.tests.test_plot.TestContour.test_tx.0": "eeece0173c07951fbd038748914964e8c14e72e9c1531ee1cc746bb293973ecd", + "iris.tests.test_plot.TestContour.test_ty.0": "ebfa8553fc01b15af4055a069546caa5954b7e9bc0f97d2cc2d62d360b362b49", + "iris.tests.test_plot.TestContour.test_tz.0": "8bfe805ffc00857ef0007e01d4027e80815fd56a81ff7a8085ff3aaa03ff6af5", + "iris.tests.test_plot.TestContour.test_yx.0": "e85e36cb95b199998e6d4d3694b26c78c7396329958434c2cacb6c6d69ce9392", + "iris.tests.test_plot.TestContour.test_zx.0": "affe8057fc00855cf8007e00d0027e808557d5ea815f7ea0817f2fea817f2bff", + "iris.tests.test_plot.TestContour.test_zy.0": "abff817ff801857afc017a80d4027e00855ec42a81fe7a8185fe6a8f05fe2abf", + "iris.tests.test_plot.TestContourf.test_tx.0": "ea857a81957a857e957a857a857a958ac5723b0d7ac56b833e856e606a933e90", + "iris.tests.test_plot.TestContourf.test_ty.0": "ea851f00957ac0f3957ac07f957a628d815e7b926ab13e816a953aac6a859ed3", + "iris.tests.test_plot.TestContourf.test_tz.0": "fa81857e857e7a81857a7a81857e7a81857e7a806a95857a7a85857e7a85817e", + "iris.tests.test_plot.TestContourf.test_yx.0": "e97a386e968597b19685c9c296a7c79493c16e59691a387f6978396e6c6a3184", + "iris.tests.test_plot.TestContourf.test_zx.0": "fa81817e857e7a81857a7a81957a6e81917a6caa3a85c57a3a8585fa6a8591fe", + "iris.tests.test_plot.TestContourf.test_zy.0": "fa81817e857e7a81857e7a81817a7e81817a668f7a91857e7a81857e7a85817e", + "iris.tests.test_plot.TestHybridHeight.test_bounds.0": "ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84", + "iris.tests.test_plot.TestHybridHeight.test_bounds.1": "bf813e85c07ec57ec17e9073c07e3f81856ec17a3f80c0fe3e813f84c2733e80", + "iris.tests.test_plot.TestHybridHeight.test_bounds.2": "ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84", + "iris.tests.test_plot.TestHybridHeight.test_orography.0": "fa817a91917a957ac4ff240cc07f6ea466a5c03f3b81c17f1b321b01d35b3fc0", + "iris.tests.test_plot.TestHybridHeight.test_orography.1": "ea07695f95e0d2b4c09d95e0956a3da99294c2be3e85c07f3fa92b05c15e3f42", + "iris.tests.test_plot.TestHybridHeight.test_points.0": "fe857b91917a847ec4bd3f01c47c6ca43b11915a3ea4db3b1b4a84c4c03f3fc1", + "iris.tests.test_plot.TestHybridHeight.test_points.1": 
"be813a81c17ec57ec17e952ac07f3f808556c17e3f80c07f3e813f80c27e3f81", + "iris.tests.test_plot.TestHybridHeight.test_points.2": "fe856a85957a955ac03f956ac17f3f809552c07f3e81c07e3e807e85c07e3f80", + "iris.tests.test_plot.TestHybridHeight.test_points.3": "fe857b91917a847ec4bd3f01c47c6ca43b11915a3ea4db3b1b4a84c4c03f3fc1", + "iris.tests.test_plot.TestHybridHeight.test_points.4": "b878387e978ec2f0c0f09f83878f3f81c070c0fe78d0c1763fa13856d03e3f0f", + "iris.tests.test_plot.TestMissingCS.test_missing_cs.0": "fa816ac1857e853cc17e957ac15f3e8494c6c8f43e81c13b3f813e91c07e3f46", + "iris.tests.test_plot.TestMissingCoord.test_no_u.0": "ea856a95955a954ac17f954a807e3f48951ac07e3f81c0ff7ea16a81c0bf3f81", + "iris.tests.test_plot.TestMissingCoord.test_no_u.1": "ea956ab5954a954ac17e9542817f2f60950ac07f3e80c0ff7a856aa5c2ff3f80", + "iris.tests.test_plot.TestMissingCoord.test_no_v.0": "fa816a85957a857ac17e954ac17e1fa2950bc07e3e81c07f3e807a85c17f3f81", + "iris.tests.test_plot.TestMissingCoord.test_no_v.1": "fa856a85957a857ac17e954ac17e9d02954ac07e3e81c07f3e857a85c2fd3f80", + "iris.tests.test_plot.TestMissingCoord.test_none.0": "fa816a85957a857ac17e954ac17e3fa2950ac07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_plot.TestMissingCoord.test_none.1": "fa856a85957a957ac17e954ac17a1f06954ac07f3e81c07f3e817a85c0fd3f80", + "iris.tests.test_plot.TestPcolor.test_tx.0": "ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa", + "iris.tests.test_plot.TestPcolor.test_ty.0": "ea953f83954ac2fc956ac07e956a3509c0de61796ab57a916a854a916ab590fb", + "iris.tests.test_plot.TestPcolor.test_tz.0": "fa81857e857a7a84857a7a81857e7a813e2f7a817a85857a7a85857a7a85857a", + "iris.tests.test_plot.TestPcolor.test_yx.0": "e97a387e968596319697c3c19284a62c93ad60c36933393a6c7a793b6c6b31cd", + "iris.tests.test_plot.TestPcolor.test_zx.0": "fa81857e857a6e05857e7a81857e7a81e0577a816a8585fa7a85857e7a81857e", + "iris.tests.test_plot.TestPcolor.test_zy.0": "fa81857e857e7e80857e7a81857e7a812d577a816a85857e7a81857e7a80857e", + "iris.tests.test_plot.TestPcolorNoBounds.test_tx.0": "ea858782957a603f957a3878957a7a7d957a6bc06ae56f806ad50fd06a859c50", + "iris.tests.test_plot.TestPcolorNoBounds.test_ty.0": "ea85857a857e7e81957a7a81957a6a85857acaa6c1fb6aa67a81956e6a81b506", + "iris.tests.test_plot.TestPcolorNoBounds.test_tz.0": "fa817e81857e857a857e7a81857e6a85817b81e63e813e857e81c17e7a81956e", + "iris.tests.test_plot.TestPcolorNoBounds.test_yx.0": "e96ac78796953c4c9685383996c538e69792637063696b49693ac796693ac71b", + "iris.tests.test_plot.TestPcolorNoBounds.test_zx.0": "fa817a81857e857e857e7a81857e6a84c17f95786aa77a807e81c17c7e819558", + "iris.tests.test_plot.TestPcolorNoBounds.test_zy.0": "fa817a80857e857e857e7a81817e3e81817e857f6aa07a857e80c17f7e80c15f", + "iris.tests.test_plot.TestPcolormesh.test_tx.0": "ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa", + "iris.tests.test_plot.TestPcolormesh.test_ty.0": "ea953f83954ac2fc956ac07e956a3509c0de71796ab57a816a854a916ab590fb", + "iris.tests.test_plot.TestPcolormesh.test_tz.0": "fa81857e857a7e84857a7a81857e7a813a0f7a817a85857b7a85857a7a85857a", + "iris.tests.test_plot.TestPcolormesh.test_yx.0": "e97a387e968596319697c3c19284a62c93ad60c36933393a6c7e793a6c6b31cd", + "iris.tests.test_plot.TestPcolormesh.test_zx.0": "fa81857e857a7e01857e7a81857e7a81e0577a816a8585fa7a85857e7a81857e", + "iris.tests.test_plot.TestPcolormesh.test_zy.0": "fa81857e857e7e80857e7a81857e7a8125577a817a85817f7a81857e7a80857e", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_tx.0": 
"ea858782957a603f957a387a957a7a6d957a6bc06ae56f806ad50fd06a859c50", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_ty.0": "ea85857a857e3e81957a7a81957a6a85857acae6c1fb6aa67a81956e6a81b506", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_tz.0": "fa813e81857e857a857e7a81857e6a85817b0aa63e993e857e81c17e7a81956e", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_yx.0": "e96ac79796953c4c9685383996c538e69692637261696b49693ac796693ac71b", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_zx.0": "fa817a85857a857e857e7a81857e7a81c17f95506aaf7a807e81c17c7a81857a", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_zy.0": "fa817a80857a857e857e7a81817e3e81817e2f756aa47a817e80c17f7e80c17f", + "iris.tests.test_plot.TestPlot.test_t.0": "8ffe9c1a7e05e718f305d9d2e463127181380c9e824e2fa781db2bed76b4fe00", + "iris.tests.test_plot.TestPlot.test_t_dates.0": "87fc9d8b7e044d81f5037bd4c14324749279a73e8d9d864f09e4a7b348dc2769", + "iris.tests.test_plot.TestPlot.test_x.0": "8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1", + "iris.tests.test_plot.TestPlot.test_y.0": "aff8946c7a14c99fb193d263e42432d8d00c2d27944a3f8dc5223ef703ff6b90", + "iris.tests.test_plot.TestPlot.test_z.0": "8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21", + "iris.tests.test_plot.TestPlotCitation.test.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", + "iris.tests.test_plot.TestPlotCitation.test_axes.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", + "iris.tests.test_plot.TestPlotCitation.test_figure.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_non_cube_coordinate.0": "fa81857e857e3e85857e7a81857e7a81857e7a817e81780b7a81c56a7a81857e", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": "ea853f10956ac1e1957a854e957a207e955e6aa76ae17aa16a856aaf6ab19e12", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": "ea853a85857a857a957a857a957ed05a857b3e946a606b917a816f247a853af4", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": "eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": "aff24ab7fd05952dbd0f950f910fed48c47868f2e1b9329094266e345a850f6c", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": "eaa9b5699556854e9456854ed05625f9d0a92bfdc0a90afd81f97e00855e7ab6", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": "eaf73e0d9503852c950395ac9528c1fad06cc0f2d1ec6af2c0fc6a536a1797f3", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": "afea950ddb13c03e34359ad8a4c86f24913f2693806e3ff1f4087b4285fd2af2", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": "afee9632de05c9d9f180d168c454a53e931b3e84954a3b8c85f94ce703ff7284", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.0": "ea853f00957ac07c957ac0bf951a69f3c47c7a5f3a4127816b953e646b813761", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.1": "e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.2": "ebffca44f102b3609c309c9b940d19add1bb63b3a7843e4acc5a6aa56acc6b64", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.3": "e85a6b6c86a595a791c9349b94b63b69c7926b5bccca66646b3869b831a52ca6", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.4": "ea153e0395aac0f895eac1f8941e69e56a743e5d7a432787691ef860c3c1938f", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.5": 
"e96930749696cb9d9697cdc39692671b696c306969eb3c76697319942a0d8699", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.0": "be813ea0c17ec55ac17ed23dc07e295ac57e2b653f803f813e816e853e85b542", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.1": "ea85856e857e4893957a7aa1956a7b81954b3b817a856fd46a85847c6e85857e", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.2": "cbedcd25bc02a4929c103a5bf03fdbbc81cb364d84e46da70f86899b3a0f6ec1", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.3": "aee1793a6b168569b852d697913c622cc5ca2e4b952d3bb4c2b66bd1426b3c71", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.4": "bec13a81c13ec54ac13e5afdd11e256a3e412afd3e4002ff2ee0fe0035fa817e", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.5": "ea1594ec95ea6c1d95ea7b0595ab3b13950f6a536a1cc6f26a0cc4f26e0c85f2", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.1": "b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.1": "b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_default.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.0": "fa85978e837e68f094d3673089626ad792073985659a9b1a7a15b52869f19f56", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.1": "ea95969c874a63d39ca3ad2a231cdbc9c4973631cd6336c633182cbc61c3d3f2", + "iris.tests.test_plot.TestPlotOtherCoordSystems.test_plot_tmerc.0": "e665326d999ecc92b399b32466269326b369cccccccd64d96199631364f33333", + "iris.tests.test_plot.TestQuickplotPlot.test_t.0": "83ffb59a7f00e59a2205d9d6e4619a74d9388c8e884e8da799d30b6dddb47e00", + "iris.tests.test_plot.TestQuickplotPlot.test_t_dates.0": "82fe958b7e046f89a0033bd4d9632c74d8799d3e8d8d826789e487b348dc2f69", + "iris.tests.test_plot.TestQuickplotPlot.test_x.0": "82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1", + "iris.tests.test_plot.TestQuickplotPlot.test_y.0": "a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10", + "iris.tests.test_plot.TestQuickplotPlot.test_z.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", + "iris.tests.test_plot.TestSimple.test_bounds.0": "ea856a85954a957ac17e954ac17a9d3a956ac07e3e80c07f3e857aa5c27d3f80", + "iris.tests.test_plot.TestSimple.test_points.0": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_plot.TestSymbols.test_cloud_cover.0": "eb5291e494ad6e136b5291ec94ad6e136b5291ec94ad6e136b5291ec94ad6e13", + "iris.tests.test_quickplot.TestLabels.test_alignment.0": "be813fe0954ac07fc0ff3e81c03fc97a6d0094af3f80c17f36a53240d97f2d82", + "iris.tests.test_quickplot.TestLabels.test_contour.0": "a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db", + "iris.tests.test_quickplot.TestLabels.test_contour.1": "bf802f85c17fc17fc07eb42ac17f3f929130c06e3f80c07f7aa02e85c07f3e81", + "iris.tests.test_quickplot.TestLabels.test_contourf.0": "be816a95957a957ac0fe1e8bc07f7f806e01c07f3f80c07f3fa23f00c07f3d00", + "iris.tests.test_quickplot.TestLabels.test_contourf.1": 
"bf802f85c17fc17fc07eb42ac17f3f929130c06e3f80c07f7aa02e85c07f3e81", + "iris.tests.test_quickplot.TestLabels.test_contourf.2": "be816a95907ae508c17e955ac07f3fa0945bc07f3f80c07f3aa36f01c0ff3f80", + "iris.tests.test_quickplot.TestLabels.test_contourf_nameless.0": "be816af5907ee508c17e955ac03f3f809419c07f3f80c07f3a8b6f81c0ff3f80", + "iris.tests.test_quickplot.TestLabels.test_map.0": "e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6", + "iris.tests.test_quickplot.TestLabels.test_map.1": "e85a636c86a597a793c9349b94b69969c396c95bcce69a64d938c9b039a58ca6", + "iris.tests.test_quickplot.TestLabels.test_pcolor.0": "eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80", + "iris.tests.test_quickplot.TestLabels.test_pcolormesh.0": "eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80", + "iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol.0": "eea16affc05ab500956e974ac53f3d80925ac03f3f80c07e3fa12d21c2ff3f80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": "fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": "ea856a95955a956ac17f950a807e3f4e951ac07e3f81c0ff3ea16aa1c0bd3e81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": "eaf9eec9f729943032168d66d4db896e9567497b81304aedc96514ad8d18669a", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": "a6fb4b967f00950eb00f9d0f900fcd62dc7868f2c1bb3a909c266e34daa52f6c", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": "eaa9b549f756854ea0168d6ed556896fd8a909ed88290afdd9e97e008d6e2296", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": "aad73e0df78085ac840195ac9528d9fad56cd8f2906c48f2d0ec7a536a1737f3", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": "a6fb958dff50c03e203598dca4c9cd26933f9cf3886e1de1dc047b4289ec2672", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": "a2ffb6127f0dc9992085d960c6748d3edb121ca49d6a1b048df34ce789ff7205", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.0": "ea856a95957a957ac07e954ac17e3e86950bc17f3ea4c27d3e833ac1c1e03f80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.1": "e5a761a79a589e58c07d1e48c07c3f819e41c07f3d84c17e3fa62585c0fe3f83", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.2": "aaffead4f7cab16490109c9b946d99add1b74bb385a41c4acd526a254acc6325", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.3": "e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.4": "ea153f0395eac1f895eac9fa941c79e56a741e4f68430f876916f860c9c1938d", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.5": "e96930749696cf9d9697cdc39692670b696c386969eb3866696399a41a0d8e99", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.0": "be816a81d17ec57ac07e952ac07f3aa0955ec17e3f80c07f3f803f80c0bf3f81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.1": "fa816a85957a957ac03f957ac07f3ba1954ac07e3e81c07f3ea47a85c07e3e80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.2": "a3eded04ff11a492b000985af07fdbb4d1eb366d8c644da79fa68993180f6e81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.3": 
"aef9793a770085e9205fd696d03ccb2485ca1e43952f1934daa66bd1ca6b3c71", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.4": "bec13e81c5bec55ac03dd8b4d17a8d6a1e4108f7384008ff1de6fe0099ee237b", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.5": "ea1595ac95e8689d95fb7b0595291943916f3b73487fccf2680484f2486ec7f0", + "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_not_reference_time_units.0": "82f8a19e7f51888c6001dda6855fd9e2dd7f986281ee19f389ef03ffdc007e00", + "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_reference_time_units.0": "82fa80997f547799a0037a00d52f0956ddaf9f7e98a1816e09f5d8260bfffe00" } \ No newline at end of file diff --git a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl index 1f6bc36832..2873f68205 100644 --- a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl +++ b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl @@ -13,7 +13,7 @@ variables: time:climatology = "time_climatology" ; time:units = "days since 1970-01-01 00:00:00-00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_climatology(time, bnds) ; double latitude(latitude) ; latitude:axis = "Y" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl b/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl index cfb3143050..762226192c 100644 --- a/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl b/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl index 88c5fc18fe..6fed33430a 100644 --- a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl index 65da679ad0..fece18b1f3 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl @@ -32,7 +32,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -41,7 +41,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 
00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl index 65da679ad0..fece18b1f3 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl @@ -32,7 +32,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -41,7 +41,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl index d7a39d72de..c85ba6aadd 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl @@ -32,7 +32,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -41,7 +41,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl index 5ff22a679b..d813ab98dc 100644 --- a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml index 4d37f856ad..09d54a1b19 100644 --- a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml +++ b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml @@ -58,7 +58,7 @@ [124, 125, 126, 127, 128, 129]]" shape="(5, 6)" standard_name="surface_altitude" units="Unit('m')" value_type="int64" var_name="surface_altitude"/> - 
+ @@ -122,7 +122,7 @@ [1240, 1250, 1260, 1270, 1280, 1290]]" shape="(5, 6)" units="Unit('m')" value_type="int64" var_name="surface_altitude_0"/> - + diff --git a/lib/iris/tests/results/merge/dec.cml b/lib/iris/tests/results/merge/dec.cml index ea72b506f0..4efd40910f 100644 --- a/lib/iris/tests/results/merge/dec.cml +++ b/lib/iris/tests/results/merge/dec.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -270,7 +270,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -291,7 +291,7 @@ - + @@ -411,7 +411,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -432,7 +432,7 @@ - + - + diff --git a/lib/iris/tests/results/merge/theta.cml b/lib/iris/tests/results/merge/theta.cml index 293e40cc3a..0e5b02be51 100644 --- a/lib/iris/tests/results/merge/theta.cml +++ b/lib/iris/tests/results/merge/theta.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/merge/theta_two_times.cml b/lib/iris/tests/results/merge/theta_two_times.cml index 0dd396e337..d1c9f59ace 100644 --- a/lib/iris/tests/results/merge/theta_two_times.cml +++ b/lib/iris/tests/results/merge/theta_two_times.cml @@ -399,7 +399,7 @@ - + - + diff --git a/lib/iris/tests/results/name/NAMEIII_field.cml b/lib/iris/tests/results/name/NAMEIII_field.cml index 97b3189bba..c419a2760d 100644 --- a/lib/iris/tests/results/name/NAMEIII_field.cml +++ b/lib/iris/tests/results/name/NAMEIII_field.cml @@ -48,7 +48,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -177,7 +177,7 @@ - + @@ -241,7 +241,7 @@ - + @@ -305,7 +305,7 @@ - + diff --git a/lib/iris/tests/results/name/NAMEIII_timeseries.cml b/lib/iris/tests/results/name/NAMEIII_timeseries.cml index c4e70590a2..3776bfc27f 100644 --- a/lib/iris/tests/results/name/NAMEIII_timeseries.cml +++ b/lib/iris/tests/results/name/NAMEIII_timeseries.cml @@ -58,7 +58,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -129,7 +129,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -199,7 +199,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -269,7 +269,7 @@ 358342.0, 358343.0, 358344.0, 
358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -339,7 +339,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEIII_trajectory.cml b/lib/iris/tests/results/name/NAMEIII_trajectory.cml index c514d589ca..20a0ec3b82 100644 --- a/lib/iris/tests/results/name/NAMEIII_trajectory.cml +++ b/lib/iris/tests/results/name/NAMEIII_trajectory.cml @@ -16,7 +16,7 @@ - + @@ -39,7 +39,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -61,7 +61,7 @@ - + @@ -84,7 +84,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -106,7 +106,7 @@ - + @@ -129,7 +129,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -151,7 +151,7 @@ - + @@ -174,7 +174,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -196,7 +196,7 @@ - + @@ -219,7 +219,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -241,7 +241,7 @@ - + @@ -264,7 +264,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -286,7 +286,7 @@ - + @@ -309,7 +309,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -331,7 +331,7 @@ - + @@ -354,7 +354,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -376,7 +376,7 @@ - + @@ -399,7 +399,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -421,7 +421,7 @@ - + @@ -444,7 +444,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -466,7 +466,7 @@ - + @@ -489,7 +489,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -511,7 +511,7 @@ - + @@ -534,7 +534,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -556,7 +556,7 @@ - + @@ 
-579,7 +579,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -601,7 +601,7 @@ - + @@ -624,7 +624,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -646,7 +646,7 @@ - + @@ -669,7 +669,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -691,7 +691,7 @@ - + @@ -714,7 +714,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -736,7 +736,7 @@ - + @@ -759,7 +759,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEIII_trajectory0.cml b/lib/iris/tests/results/name/NAMEIII_trajectory0.cml index 5f10016f39..d337ca9454 100644 --- a/lib/iris/tests/results/name/NAMEIII_trajectory0.cml +++ b/lib/iris/tests/results/name/NAMEIII_trajectory0.cml @@ -16,7 +16,7 @@ - + @@ -39,7 +39,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEIII_version2.cml b/lib/iris/tests/results/name/NAMEIII_version2.cml index 95b9db7d5b..0ad0c883a2 100644 --- a/lib/iris/tests/results/name/NAMEIII_version2.cml +++ b/lib/iris/tests/results/name/NAMEIII_version2.cml @@ -76,7 +76,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -158,7 +158,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -240,7 +240,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -322,7 +322,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', 
calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEII_field.cml b/lib/iris/tests/results/name/NAMEII_field.cml index 664669ef62..7d88c06eff 100644 --- a/lib/iris/tests/results/name/NAMEII_field.cml +++ b/lib/iris/tests/results/name/NAMEII_field.cml @@ -51,7 +51,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -166,7 +166,7 @@ - + @@ -227,7 +227,7 @@ - + @@ -288,7 +288,7 @@ - + diff --git a/lib/iris/tests/results/name/NAMEII_timeseries.cml b/lib/iris/tests/results/name/NAMEII_timeseries.cml index 52aaa8b809..39af8a6288 100644 --- a/lib/iris/tests/results/name/NAMEII_timeseries.cml +++ b/lib/iris/tests/results/name/NAMEII_timeseries.cml @@ -36,7 +36,7 @@ [370473.5, 370474.5], [370474.5, 370475.5], [370475.5, 370476.5]]" id="cb784457" points="[370345.0, 370346.0, 370347.0, ..., 370474.0, - 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -85,7 +85,7 @@ [370473.5, 370474.5], [370474.5, 370475.5], [370475.5, 370476.5]]" id="cb784457" points="[370345.0, 370346.0, 370347.0, ..., 370474.0, - 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl index e6a18dd2e4..da0d1d10db 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl index 22ee23e2f6..ef1ef973e2 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl index 50ebd1abc9..1d33942464 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl 
b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl index 9ae68a1112..5a0edc7528 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl index 4d54fe36f0..81d32bf80c 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl index 84516e186f..c6b29c5bda 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml b/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml index ca4a0eb017..c748853c5c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml +++ b/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml @@ -9,7 +9,7 @@ - + @@ -29,7 +29,7 @@ - + @@ -50,7 +50,7 @@ - + @@ -75,7 +75,7 @@ - + @@ -98,7 +98,7 @@ - + @@ -121,7 +121,7 @@ - + @@ -140,7 +140,7 @@ - + @@ -159,7 +159,7 @@ - + @@ -179,7 +179,7 @@ - + @@ -199,7 +199,7 @@ - + @@ -222,7 +222,7 @@ - + @@ -241,7 +241,7 @@ - + @@ -261,7 +261,7 @@ - + @@ -281,7 +281,7 @@ - + @@ -304,7 +304,7 @@ - + @@ -323,7 +323,7 @@ - + @@ -336,7 +336,7 @@ - + @@ -349,7 +349,7 @@ - + @@ -362,7 +362,7 @@ - + @@ -375,7 +375,7 @@ - + @@ -394,7 +394,7 @@ - + @@ -413,7 +413,7 @@ - + @@ -433,7 +433,7 @@ - + @@ -450,7 +450,7 @@ - + @@ -463,7 +463,7 @@ - + @@ -476,7 +476,7 @@ - + @@ -489,7 +489,7 @@ - + @@ -502,7 +502,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml index a11d593684..3847d5a417 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml index 30e6844591..89ee5ac195 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml index 
6f9446582a..b3c7709dae 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml index 12def7cea4..ea5e42150e 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml index b20281c53e..b028ee6cf8 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml index 0d126109cf..76f66e1bc4 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml @@ -17,7 +17,7 @@ + 929298, 929304]" shape="(20,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml index 8cfb4a0b5f..133cc4f659 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml @@ -15,7 +15,7 @@ + 929226, 929232, 929238, 929244]" shape="(10,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml index 9259a07563..1d7025751e 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml index 6bc1a094e3..1f5a990bd4 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml index 0535339c7e..9c32197e56 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml index 6a0f9a90bf..100ab1257c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml index bda7f9ed9f..22a4ff1989 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml @@ -82,7 +82,7 @@ 71603.5, 71604.5, 71605.5, 71606.5, 71607.5, 71608.5, 71609.5, 71610.5, 71611.5, 71612.5, 71613.5, 71614.5, 71615.5, 71616.5, 71617.5, - 71618.5]" shape="(31,)" standard_name="time" units="Unit('days since 
1850-01-01', calendar='gregorian')" value_type="float64" var_name="time"/> + 71618.5]" shape="(31,)" standard_name="time" units="Unit('days since 1850-01-01', calendar='standard')" value_type="float64" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml index 1204fd0d39..fc6772e5f0 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml @@ -19,7 +19,7 @@ 929262, 929268, 929274, 929280, 929286, 929292, 929298, 929304, 929310, 929316, 929322, 929328, 929334, 929340, 929346, 929352, 929358, 929364, - 929370]" shape="(31,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='gregorian')" value_type="int32" var_name="time"/> + 929370]" shape="(31,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml index ac41f4a8b8..9d6b3c1e43 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml @@ -20,7 +20,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + @@ -46,7 +46,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml index 4234b5cc84..15ab300757 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml @@ -20,7 +20,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml index 17d87a0190..29ff3b9bd9 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml @@ -20,7 +20,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_laea.cml b/lib/iris/tests/results/netcdf/netcdf_laea.cml index ad23114038..799f40522b 100644 --- a/lib/iris/tests/results/netcdf/netcdf_laea.cml +++ b/lib/iris/tests/results/netcdf/netcdf_laea.cml @@ -11,7 +11,7 @@ - + @@ -63,7 +63,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_lcc.cml b/lib/iris/tests/results/netcdf/netcdf_lcc.cml index 7ea53e6600..592c33d534 100644 --- a/lib/iris/tests/results/netcdf/netcdf_lcc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_lcc.cml @@ -88,7 +88,7 @@ [273.0, 303.0], [304.0, 333.0], [334.0, 364.0]]" id="1c4a69ce" long_name="time" points="[15.0, 44.5, 74.0, 104.5, 135.0, 165.5, 196.0, - 227.0, 257.5, 288.0, 318.5, 349.0]" shape="(12,)" standard_name="time" units="Unit('days since 2010-01-01 12:00:00', calendar='gregorian')" value_type="float64" var_name="time"/> + 227.0, 257.5, 288.0, 318.5, 349.0]" shape="(12,)" standard_name="time" units="Unit('days since 2010-01-01 12:00:00', calendar='standard')" value_type="float64" var_name="time"/> diff --git 
a/lib/iris/tests/results/netcdf/netcdf_merc.cml b/lib/iris/tests/results/netcdf/netcdf_merc.cml index 02fc4e7c34..c06a2efe88 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc.cml @@ -53,19 +53,19 @@ 45.5158, 45.9993]]" shape="(192, 192)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="lon"/> - - + - - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_false.cml b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml new file mode 100644 index 0000000000..1e50aa6e65 --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml b/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml new file mode 100644 index 0000000000..c9ad4ca33f --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/netcdf/netcdf_monotonic.cml b/lib/iris/tests/results/netcdf/netcdf_monotonic.cml index 578b2b6d96..3385ecd6fe 100644 --- a/lib/iris/tests/results/netcdf/netcdf_monotonic.cml +++ b/lib/iris/tests/results/netcdf/netcdf_monotonic.cml @@ -12,7 +12,7 @@ - + @@ -30,7 +30,7 @@ - + @@ -48,7 +48,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_polar.cml b/lib/iris/tests/results/netcdf/netcdf_polar.cml new file mode 100644 index 0000000000..15c1a90da9 --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_polar.cml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml b/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml index b236a3677d..05e5fe475d 100644 --- a/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml +++ b/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml @@ -54,7 +54,7 @@ + [2925.5, 2926.5]]" id="2306ff47" long_name="Julian Day" points="[2922.5, 2923.5, 2924.5, 2925.5]" shape="(4,)" standard_name="time" units="Unit('days since 1950-01-01 00:00:00.0', calendar='standard')" value_type="float32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl b/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl index 1863d1ee7d..74a83c9714 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl @@ -22,7 +22,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; @@ -46,7 +46,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; float level_height(model_level_number) ; level_height:bounds = "level_height_bnds" ; level_height:units = "m" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml index 8e4a005d44..fbecdf97d3 100644 --- 
a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml @@ -418,7 +418,7 @@ 0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64" var_name="forecast_period"/> - + + 347926.666667, 347926.833333, 347927.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml index 13582b3106..54bcc8a686 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml @@ -54,7 +54,7 @@ + [2925.5, 2926.5]]" id="2306ff47" long_name="Julian Day" points="[2922.5, 2923.5, 2924.5, 2925.5]" shape="(4,)" standard_name="time" units="Unit('days since 1950-01-01 00:00:00.0', calendar='standard')" value_type="float32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl b/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl index 32d4163d01..f8180d4ea8 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl @@ -22,7 +22,7 @@ variables: time:units = "days since 1950-01-01 00:00:00.0" ; time:standard_name = "time" ; time:long_name = "Julian Day" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; float time_bnds(time, bnds) ; float rlat(rlat) ; rlat:axis = "Y" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl index 0e3ae7e715..642e46a905 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl @@ -50,7 +50,7 @@ variables: double time ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; // global attributes: :source = "Iris test case" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl index 601ea11719..d49e775024 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl index b86a77aa62..8353df60e9 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_single.cdl 
b/lib/iris/tests/results/netcdf/netcdf_save_single.cdl index e45496521c..9847532001 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_single.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_single.cdl @@ -30,7 +30,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -39,7 +39,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/netcdf/netcdf_stereo.cml b/lib/iris/tests/results/netcdf/netcdf_stereo.cml index b07304cd62..fae7ff027b 100644 --- a/lib/iris/tests/results/netcdf/netcdf_stereo.cml +++ b/lib/iris/tests/results/netcdf/netcdf_stereo.cml @@ -54,19 +54,19 @@ 10.449, 10.5996]]" shape="(160, 256)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="lon"/> - - + - - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml index 2d909ba57e..0575c684a9 100644 --- a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml +++ b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml @@ -62,7 +62,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/save_load_traj.cml b/lib/iris/tests/results/netcdf/save_load_traj.cml index 7f8b3d7e99..9b225d127f 100644 --- a/lib/iris/tests/results/netcdf/save_load_traj.cml +++ b/lib/iris/tests/results/netcdf/save_load_traj.cml @@ -1,6 +1,6 @@ - + @@ -36,6 +36,6 @@ - + diff --git a/lib/iris/tests/results/nimrod/load_2flds.cml b/lib/iris/tests/results/nimrod/load_2flds.cml index b068657d40..41e92dd48b 100644 --- a/lib/iris/tests/results/nimrod/load_2flds.cml +++ b/lib/iris/tests/results/nimrod/load_2flds.cml @@ -14,7 +14,7 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/period_of_interest.cml b/lib/iris/tests/results/nimrod/period_of_interest.cml index 258e5bcbbc..4c495b212a 100644 --- a/lib/iris/tests/results/nimrod/period_of_interest.cml +++ b/lib/iris/tests/results/nimrod/period_of_interest.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/probability_fields.cml b/lib/iris/tests/results/nimrod/probability_fields.cml index 7add3e75a4..184d205132 100644 --- a/lib/iris/tests/results/nimrod/probability_fields.cml +++ b/lib/iris/tests/results/nimrod/probability_fields.cml @@ -17,7 +17,7 @@ - + + @@ -62,7 +62,7 @@ - + + @@ -111,7 +111,7 @@ - + @@ -131,7 +131,7 @@ - + @@ -158,7 +158,7 @@ - + @@ -186,7 +186,7 @@ - + @@ -210,7 +210,7 @@ - + @@ -237,7 +237,7 @@ - + @@ -271,7 +271,7 @@ - + @@ -291,7 +291,7 @@ - + @@ -315,7 +315,7 @@ - + @@ -331,7 +331,7 @@ - + @@ -358,7 +358,7 @@ - + @@ -378,7 +378,7 @@ - + @@ -402,7 +402,7 @@ - + @@ -422,7 +422,7 @@ - + @@ -448,7 +448,7 @@ - + @@ -461,7 +461,7 @@ - + @@ -484,7 +484,7 @@ - + @@ -497,7 +497,7 @@ - + @@ -521,7 +521,7 @@ - + @@ -537,7 +537,7 @@ - + @@ -560,7 +560,7 @@ - + @@ -573,7 +573,7 @@ - + @@ -600,7 +600,7 @@ - + @@ -620,7 +620,7 @@ - + @@ -644,7 +644,7 @@ - + @@ -664,7 +664,7 @@ - + @@ -688,7 +688,7 @@ - + @@ -701,7 +701,7 @@ - + @@ -726,7 +726,7 @@ - + @@ -742,7 +742,7 @@ - + @@ -766,7 +766,7 @@ - + @@ -779,7 +779,7 @@ - + @@ -807,7 +807,7 @@ 
- + @@ -828,7 +828,7 @@ - + @@ -853,7 +853,7 @@ - + @@ -873,7 +873,7 @@ - + @@ -901,7 +901,7 @@ - + @@ -922,7 +922,7 @@ - + @@ -949,7 +949,7 @@ - + @@ -969,7 +969,7 @@ - + @@ -992,7 +992,7 @@ - + @@ -1005,7 +1005,7 @@ - + @@ -1029,7 +1029,7 @@ - + @@ -1042,7 +1042,7 @@ - + @@ -1067,7 +1067,7 @@ - + @@ -1083,7 +1083,7 @@ - + @@ -1110,7 +1110,7 @@ - + @@ -1130,7 +1130,7 @@ - + @@ -1153,7 +1153,7 @@ - + @@ -1166,7 +1166,7 @@ - + @@ -1190,7 +1190,7 @@ - + @@ -1213,7 +1213,7 @@ - + @@ -1236,7 +1236,7 @@ - + @@ -1256,7 +1256,7 @@ - + @@ -1280,7 +1280,7 @@ - + @@ -1303,7 +1303,7 @@ - + @@ -1326,7 +1326,7 @@ - + @@ -1346,7 +1346,7 @@ - + @@ -1369,7 +1369,7 @@ - + @@ -1389,7 +1389,7 @@ - + @@ -1417,7 +1417,7 @@ - + @@ -1444,7 +1444,7 @@ - + @@ -1468,7 +1468,7 @@ - + @@ -1495,7 +1495,7 @@ - + @@ -1518,7 +1518,7 @@ - + @@ -1538,7 +1538,7 @@ - + @@ -1562,7 +1562,7 @@ - + @@ -1585,7 +1585,7 @@ - + @@ -1608,7 +1608,7 @@ - + @@ -1628,7 +1628,7 @@ - + @@ -1656,7 +1656,7 @@ - + @@ -1683,7 +1683,7 @@ - + @@ -1707,7 +1707,7 @@ - + @@ -1734,7 +1734,7 @@ - + @@ -1757,7 +1757,7 @@ - + @@ -1777,7 +1777,7 @@ - + @@ -1800,7 +1800,7 @@ - + @@ -1820,7 +1820,7 @@ - + @@ -1844,7 +1844,7 @@ - + @@ -1867,7 +1867,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml index 31518dd321..a6ed9068ca 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml @@ -19,7 +19,7 @@ [6300, 7200]]" id="b40ecfd3" points="[7200, 7200]" shape="(2,)" standard_name="forecast_period" units="Unit('second')" value_type="int32"/> - + @@ -36,7 +36,7 @@ + [1580193900, 1580194800]]" id="90a3bd1c" points="[1580194800, 1580194800]" shape="(2,)" standard_name="time" units="Unit('seconds since 1970-01-01 00:00:00', calendar='standard')" value_type="int64"/> diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml index 80cb1834c0..cf3232d548 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml @@ -18,7 +18,7 @@ - + @@ -34,7 +34,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml index 68ec95555c..2aa1576fad 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml @@ -17,7 +17,7 @@ - + - + @@ -73,7 +73,7 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml index c6bc6f0419..3dc62cc8e9 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -96,7 +96,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -136,7 +136,7 @@ - + @@ -152,7 +152,7 @@ - + @@ -175,7 +175,7 @@ - + - + @@ -226,7 +226,7 @@ - + @@ -242,7 +242,7 @@ - + @@ -268,7 +268,7 @@ - + @@ -284,7 +284,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml index e6c99f9e50..9be61d489c 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml +++ 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -79,7 +79,7 @@ - + @@ -102,7 +102,7 @@ - + @@ -118,7 +118,7 @@ - + @@ -141,7 +141,7 @@ - + @@ -157,7 +157,7 @@ - + @@ -185,7 +185,7 @@ - + @@ -208,7 +208,7 @@ - + @@ -231,7 +231,7 @@ - + @@ -247,7 +247,7 @@ - + @@ -270,7 +270,7 @@ - + @@ -286,7 +286,7 @@ - + @@ -309,7 +309,7 @@ - + @@ -328,7 +328,7 @@ - + @@ -351,7 +351,7 @@ - + @@ -370,7 +370,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml index 2f52a93277..734beb7f47 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -70,7 +70,7 @@ - + @@ -86,7 +86,7 @@ - + @@ -116,7 +116,7 @@ - + @@ -132,7 +132,7 @@ - + @@ -162,7 +162,7 @@ - + @@ -178,7 +178,7 @@ - + @@ -208,7 +208,7 @@ - + @@ -224,7 +224,7 @@ - + @@ -247,7 +247,7 @@ - + @@ -270,7 +270,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml index b2b47715a2..56bfecc1b4 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -95,7 +95,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -135,7 +135,7 @@ - + @@ -151,7 +151,7 @@ - + @@ -175,7 +175,7 @@ - + @@ -191,7 +191,7 @@ - + @@ -214,7 +214,7 @@ - + @@ -230,7 +230,7 @@ - + @@ -254,7 +254,7 @@ - + @@ -270,7 +270,7 @@ - + @@ -294,7 +294,7 @@ - + @@ -310,7 +310,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml index 4fb1371250..2eb83d787b 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml index 59776b5b74..4f4c986a39 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -73,7 +73,7 @@ - + @@ -97,7 +97,7 @@ - + @@ -113,7 +113,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml index 0fa98e3bb6..dd6102ea7f 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml @@ -19,7 +19,7 @@ [6300, 7200]]" id="b40ecfd3" points="[7200, 7200]" shape="(2,)" standard_name="forecast_period" units="Unit('second')" value_type="int32"/> - + @@ -36,7 +36,7 @@ + [1580186700, 1580187600]]" id="90a3bd1c" points="[1580187600, 1580187600]" shape="(2,)" standard_name="time" units="Unit('seconds since 1970-01-01 00:00:00', calendar='standard')" value_type="int64"/> diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml index 3fdf646e70..be1e89a53d 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml @@ -17,7 
+17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -73,7 +73,7 @@ - + @@ -97,7 +97,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -136,7 +136,7 @@ - + @@ -152,7 +152,7 @@ - + @@ -176,7 +176,7 @@ - + @@ -192,7 +192,7 @@ - + @@ -216,7 +216,7 @@ - + @@ -232,7 +232,7 @@ - + @@ -255,7 +255,7 @@ - + @@ -271,7 +271,7 @@ - + @@ -295,7 +295,7 @@ - + @@ -311,7 +311,7 @@ - + @@ -335,7 +335,7 @@ - + @@ -351,7 +351,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml index edb0862676..9a3ff88df8 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml index 38f076f232..00bc65f236 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -95,7 +95,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -134,7 +134,7 @@ - + @@ -150,7 +150,7 @@ - + @@ -173,7 +173,7 @@ - + @@ -189,7 +189,7 @@ - + @@ -212,7 +212,7 @@ - + @@ -228,7 +228,7 @@ - + @@ -251,7 +251,7 @@ - + @@ -267,7 +267,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml index 35bed38591..b2cf624214 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -95,7 +95,7 @@ - + @@ -111,7 +111,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml index 4411ff9dd5..aaed20394f 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml @@ -26,7 +26,7 @@ - + @@ -42,7 +42,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml index 8759dac5c7..3a25dc86fc 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml @@ -17,7 +17,7 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml index 9b7e7582d0..fa4ab30a58 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml index ce549ab3cd..918a0c7ae5 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -73,7 +73,7 @@ - + @@ -96,7 +96,7 @@ - + @@ -112,7 +112,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml index 
9385bfc9ae..3a6c3bf53c 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -70,7 +70,7 @@ - + @@ -86,7 +86,7 @@ - + @@ -116,7 +116,7 @@ - + @@ -132,7 +132,7 @@ - + @@ -162,7 +162,7 @@ - + @@ -178,7 +178,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml index a76971a1ed..eab889a8af 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -77,7 +77,7 @@ urban_roof, water]" shape="(10,)" standard_name="soil_type" units="Unit('unknown')" value_type="string"/> - + @@ -100,7 +100,7 @@ - + @@ -121,7 +121,7 @@ urban_roof, water]" shape="(10,)" standard_name="soil_type" units="Unit('unknown')" value_type="string"/> - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml index 09677ff57a..6ff6359046 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml @@ -18,7 +18,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -65,7 +65,7 @@ - + @@ -88,7 +88,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -134,7 +134,7 @@ - + @@ -157,7 +157,7 @@ - + @@ -180,7 +180,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml index 8a0f50700c..037cb5c2b6 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -86,7 +86,7 @@ - + @@ -109,7 +109,7 @@ - + @@ -132,7 +132,7 @@ - + @@ -155,7 +155,7 @@ - + @@ -178,7 +178,7 @@ - + @@ -201,7 +201,7 @@ - + @@ -224,7 +224,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml index df2054e8af..5ca9920172 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml @@ -18,7 +18,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -64,7 +64,7 @@ - + @@ -87,7 +87,7 @@ - + @@ -110,7 +110,7 @@ - + @@ -133,7 +133,7 @@ - + @@ -156,7 +156,7 @@ - + @@ -179,7 +179,7 @@ - + @@ -202,7 +202,7 @@ - + @@ -225,7 +225,7 @@ - + @@ -249,7 +249,7 @@ - + @@ -272,7 +272,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml index 331ff59c74..91c40ea6d0 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -86,7 +86,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml index aa14346e2f..3252dbf047 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -86,7 +86,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml index 
1756ac0205..d39fa0e367 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -98,7 +98,7 @@ - + @@ -114,7 +114,7 @@ - + @@ -137,7 +137,7 @@ - + @@ -153,7 +153,7 @@ - + @@ -176,7 +176,7 @@ - + @@ -192,7 +192,7 @@ - + @@ -215,7 +215,7 @@ - + @@ -231,7 +231,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml index f4710dd36d..4a5783ecb3 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml @@ -18,7 +18,7 @@ - + @@ -34,7 +34,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml index 57756ccc1d..d2c7e72848 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml @@ -17,7 +17,7 @@ - + @@ -37,7 +37,7 @@ - + - + diff --git a/lib/iris/tests/results/pandas/as_cube/series_datetime_gregorian.cml b/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml similarity index 86% rename from lib/iris/tests/results/pandas/as_cube/series_datetime_gregorian.cml rename to lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml index 7e2e6f4166..5cb621d5f3 100644 --- a/lib/iris/tests/results/pandas/as_cube/series_datetime_gregorian.cml +++ b/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml @@ -4,7 +4,7 @@ + 300292.067778, 309797.084722]" shape="(5,)" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/pp_load_rules/global.cml b/lib/iris/tests/results/pp_load_rules/global.cml index 2df84a8606..a69e633e26 100644 --- a/lib/iris/tests/results/pp_load_rules/global.cml +++ b/lib/iris/tests/results/pp_load_rules/global.cml @@ -10,7 +10,7 @@ - + - + diff --git a/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml b/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml index 9e4b6d31f5..ecf51190c7 100644 --- a/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml +++ b/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml @@ -11,7 +11,7 @@ - + @@ -31,7 +31,7 @@ - + @@ -48,7 +48,7 @@ - + @@ -68,7 +68,7 @@ - + @@ -89,7 +89,7 @@ - + @@ -109,7 +109,7 @@ - + @@ -130,7 +130,7 @@ - + @@ -150,7 +150,7 @@ - + @@ -171,7 +171,7 @@ - + @@ -190,7 +190,7 @@ 850.0, 925.0, 950.0, 1000.0]" shape="(28,)" units="Unit('hPa')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/pp_load_rules/rotated_uk.cml b/lib/iris/tests/results/pp_load_rules/rotated_uk.cml index 51b4682ebf..ece399df4e 100644 --- a/lib/iris/tests/results/pp_load_rules/rotated_uk.cml +++ b/lib/iris/tests/results/pp_load_rules/rotated_uk.cml @@ -11,7 +11,7 @@ - + @@ -35,7 +35,7 @@ - + diff --git a/lib/iris/tests/results/stock/realistic_4d.cml b/lib/iris/tests/results/stock/realistic_4d.cml index 88adbc43de..6640c54360 100644 --- a/lib/iris/tests/results/stock/realistic_4d.cml +++ b/lib/iris/tests/results/stock/realistic_4d.cml @@ -498,7 +498,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git 
a/lib/iris/tests/results/system/supported_filetype_.grib2.cml b/lib/iris/tests/results/system/supported_filetype_.grib2.cml index f334b13863..5376af2fe1 100644 --- a/lib/iris/tests/results/system/supported_filetype_.grib2.cml +++ b/lib/iris/tests/results/system/supported_filetype_.grib2.cml @@ -9,7 +9,7 @@ - + - + diff --git a/lib/iris/tests/results/system/supported_filetype_.nc.cml b/lib/iris/tests/results/system/supported_filetype_.nc.cml index 595cd287ae..6ad0a3b176 100644 --- a/lib/iris/tests/results/system/supported_filetype_.nc.cml +++ b/lib/iris/tests/results/system/supported_filetype_.nc.cml @@ -36,7 +36,7 @@ - + diff --git a/lib/iris/tests/results/system/supported_filetype_.pp.cml b/lib/iris/tests/results/system/supported_filetype_.pp.cml index 838b9fad50..e457b2921e 100644 --- a/lib/iris/tests/results/system/supported_filetype_.pp.cml +++ b/lib/iris/tests/results/system/supported_filetype_.pp.cml @@ -6,7 +6,7 @@ - + - + diff --git a/lib/iris/tests/results/trajectory/constant_latitude.cml b/lib/iris/tests/results/trajectory/constant_latitude.cml index 7990edada5..38c208b825 100644 --- a/lib/iris/tests/results/trajectory/constant_latitude.cml +++ b/lib/iris/tests/results/trajectory/constant_latitude.cml @@ -1,6 +1,6 @@ - + @@ -12,17 +12,17 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + -0.1188, -0.1188]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float64"> - + @@ -90,10 +90,10 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/trajectory/hybrid_height.cml b/lib/iris/tests/results/trajectory/hybrid_height.cml index 63de9366dc..972fa7b330 100644 --- a/lib/iris/tests/results/trajectory/hybrid_height.cml +++ b/lib/iris/tests/results/trajectory/hybrid_height.cml @@ -54,7 +54,7 @@ [124, 125, 126, 127, 128, 129]]" shape="(5, 6)" units="Unit('m')" value_type="int64"/> - + @@ -91,7 +91,7 @@ - + diff --git a/lib/iris/tests/results/trajectory/single_point.cml b/lib/iris/tests/results/trajectory/single_point.cml index 393ad5e335..64c71e0394 100644 --- a/lib/iris/tests/results/trajectory/single_point.cml +++ b/lib/iris/tests/results/trajectory/single_point.cml @@ -1,6 +1,6 @@ - + @@ -12,15 +12,15 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + - + @@ -88,10 +88,10 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/trajectory/zigzag.cml b/lib/iris/tests/results/trajectory/zigzag.cml index 250500786c..8a578c4ab4 100644 --- a/lib/iris/tests/results/trajectory/zigzag.cml +++ b/lib/iris/tests/results/trajectory/zigzag.cml @@ -1,6 +1,6 @@ - + @@ -11,22 +11,29 @@ - + - + - + @@ -48,10 +55,10 @@ - + - + diff --git a/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml b/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml index e2a1ef2ea6..2592307cda 100644 --- a/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml +++ b/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml @@ 
-79,7 +79,7 @@ [0.996162, 0.993097]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482]" shape="(3,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml 
b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml new file mode 100644 index 0000000000..9fc80a0e4d --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml @@ -0,0 +1,122 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml new file mode 100644 index 0000000000..9fc80a0e4d --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml @@ -0,0 +1,122 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml new file mode 100644 index 0000000000..9fc80a0e4d --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml @@ -0,0 +1,122 @@ + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml new file mode 100644 index 0000000000..9fc80a0e4d --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml @@ -0,0 +1,122 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml new file mode 100644 index 0000000000..82b7e25aa3 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git 
a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git 
a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', 
calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 
347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content/mesh_result.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content/mesh_result.txt new file mode 100644 index 0000000000..e20527cb49 --- /dev/null +++ 
b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content/mesh_result.txt @@ -0,0 +1,24 @@ + + Mesh coordinates + + + + latitude + x + + + longitude + x + + + Mesh + + + + name + unknown + + + location + face + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/embedded_newlines_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/embedded_newlines_string_attribute.txt new file mode 100644 index 0000000000..e886d25e60 --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/embedded_newlines_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + newlines-string + 'string\nwith\nnewlines' + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/long_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/long_string_attribute.txt new file mode 100644 index 0000000000..e972e1d6df --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/long_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + long-string + 'long string.. long string.. long string.. long string.. long string.. long ...' + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/multi_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/multi_string_attribute.txt new file mode 100644 index 0000000000..1736a083d6 --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/multi_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + multi-string + ['vector', 'of', 'strings'] + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/simple_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/simple_string_attribute.txt new file mode 100644 index 0000000000..8726d1f6ea --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/simple_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + single-string + 'single string' + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl index 1559cd2bff..ea9a1c283b 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl @@ -13,7 +13,7 @@ variables: mercator:longitude_of_projection_origin = 49. ; mercator:false_easting = 0. ; mercator:false_northing = 0. ; - mercator:scale_factor_at_projection_origin = 1. ; + mercator:standard_parallel = 0. 
; int64 projection_y_coordinate(projection_y_coordinate) ; projection_y_coordinate:axis = "Y" ; projection_y_coordinate:units = "m" ; diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl index 8db60ca952..73b692ed63 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl @@ -10,7 +10,7 @@ variables: mercator:longitude_of_projection_origin = 49. ; mercator:false_easting = 0. ; mercator:false_northing = 0. ; - mercator:scale_factor_at_projection_origin = 1. ; + mercator:standard_parallel = 0. ; int64 projection_y_coordinate(projection_y_coordinate) ; projection_y_coordinate:axis = "Y" ; projection_y_coordinate:units = "m" ; diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl new file mode 100644 index 0000000000..a11dc60c30 --- /dev/null +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl @@ -0,0 +1,23 @@ +dimensions: + projection_x_coordinate = 4 ; + projection_y_coordinate = 3 ; +variables: + int64 air_pressure_anomaly(projection_y_coordinate, projection_x_coordinate) ; + air_pressure_anomaly:standard_name = "air_pressure_anomaly" ; + air_pressure_anomaly:grid_mapping = "stereographic" ; + int stereographic ; + stereographic:grid_mapping_name = "stereographic" ; + stereographic:longitude_of_projection_origin = 20. ; + stereographic:latitude_of_projection_origin = -10. ; + stereographic:false_easting = 500000. ; + stereographic:false_northing = -200000. 
; + stereographic:scale_factor_at_projection_origin = 1.3 ; + int64 projection_y_coordinate(projection_y_coordinate) ; + projection_y_coordinate:axis = "Y" ; + projection_y_coordinate:units = "m" ; + projection_y_coordinate:standard_name = "projection_y_coordinate" ; + int64 projection_x_coordinate(projection_x_coordinate) ; + projection_x_coordinate:axis = "X" ; + projection_x_coordinate:units = "m" ; + projection_x_coordinate:standard_name = "projection_x_coordinate" ; +} diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl index 3c1033c17e..2159123553 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl @@ -13,7 +13,7 @@ variables: time:climatology = "time_climatology" ; time:units = "days since 1970-01-01 00:00:00-00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_climatology(time, bnds) ; double latitude(latitude) ; latitude:axis = "Y" ; diff --git a/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml b/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml new file mode 100644 index 0000000000..52aae1eb5e --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml new file mode 100644 index 0000000000..abaebd51d6 --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml @@ -0,0 +1,123 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml new file mode 100644 index 0000000000..bf8902bcb2 --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml @@ -0,0 +1,22 @@ + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml new file mode 100644 index 0000000000..e1760775f9 --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml index 1f9dfb0a14..e7c799f397 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml index 06c192f8a4..66cbc7206b 100644 --- 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml index 9b654f6c6e..af298945f0 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml @@ -12,7 +12,7 @@ - + @@ -41,7 +41,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml index d5d05f15fd..44999e85b7 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml @@ -10,7 +10,7 @@ - + @@ -39,7 +39,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml index 1f4d8a4b2c..990fa0d7fe 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml @@ -10,7 +10,7 @@ - + @@ -39,7 +39,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml index 359cba997f..43789498c1 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl index 429da0807b..ddbbee5d34 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl @@ -31,7 +31,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -40,7 +40,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + 
time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl index 429da0807b..ddbbee5d34 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl @@ -31,7 +31,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -40,7 +40,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl index f1c94dc834..cb026fd7ae 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl @@ -31,7 +31,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -40,7 +40,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/runner/_runner.py b/lib/iris/tests/runner/_runner.py index 3ef961d000..bfb2cc2402 100644 --- a/lib/iris/tests/runner/_runner.py +++ b/lib/iris/tests/runner/_runner.py @@ -10,17 +10,16 @@ # Because this file is imported by setup.py, there may be additional runtime # imports later in the file. -import multiprocessing import os import sys # NOTE: Do not inherit from object as distutils does not like it. class TestRunner: - """Run the Iris tests under nose and multiprocessor for performance""" + """Run the Iris tests under pytest and pytest-xdist for performance""" description = ( - "Run tests under nose and multiprocessor for performance. " + "Run tests under pytest and pytest-xdist for performance. " "Default behaviour is to run all non-gallery tests. " "Specifying one or more test flags will run *only* those " "tests." @@ -70,8 +69,8 @@ def initialize_options(self): self.create_missing = False def finalize_options(self): - # These enviroment variables will be propagated to all the - # processes that nose.run creates. + # These environment variables will be propagated to all the + # processes that pytest-xdist creates. 
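+ # (Note: pytest-xdist runs its workers as separate local processes, so + # anything placed in os.environ before pytest.main() is invoked here is + # inherited by every worker process.)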
if self.no_data: print("Running tests in no-data mode...") import iris.config @@ -95,25 +94,23 @@ def finalize_options(self): if self.stop: print("Stopping tests after the first error or failure") if self.num_processors is None: - # Choose a magic number that works reasonably well for the default - # number of processes. - self.num_processors = (multiprocessing.cpu_count() + 1) // 4 + 1 + self.num_processors = "auto" else: self.num_processors = int(self.num_processors) def run(self): - import nose + import pytest if hasattr(self, "distribution") and self.distribution.tests_require: self.distribution.fetch_build_eggs(self.distribution.tests_require) tests = [] if self.system_tests: - tests.append("iris.tests.system_test") + tests.append("lib/iris/tests/system_test.py") if self.default_tests: - tests.append("iris.tests") + tests.append("lib/iris/tests") if self.coding_tests: - tests.append("iris.tests.test_coding_standards") + tests.append("lib/iris/tests/test_coding_standards.py") if self.gallery_tests: import iris.config @@ -129,35 +126,24 @@ def run(self): "WARNING: Gallery path %s does not exist." % (gallery_path) ) if not tests: - tests.append("iris.tests") - - regexp_pat = r"--match=^([Tt]est(?![Mm]ixin)|[Ss]ystem)" - - n_processors = max(self.num_processors, 1) + tests.append("lib/iris/tests") args = [ - "", None, - "--processes=%s" % n_processors, - "--verbosity=2", - regexp_pat, - "--process-timeout=180", + f"-n={self.num_processors}", ] if self.stop: - args.append("--stop") + args.append("-x") result = True for test in tests: - args[1] = test + args[0] = test print() print( - "Running test discovery on %s with %s processors." - % (test, n_processors) + f"Running test discovery on {test} with {self.num_processors} processors." ) - # run the tests at module level i.e. my_module.tests - # - test must start with test/Test and must not contain the - # word Mixin. - result &= nose.run(argv=args) + retcode = pytest.main(args=args) + result &= retcode.value == 0 if result is False: exit(1) diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index a46a5510f6..632dc95e20 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -20,7 +20,13 @@ from iris.coord_systems import GeogCS, RotatedGeogCS import iris.coords import iris.coords as icoords -from iris.coords import AuxCoord, CellMethod, DimCoord +from iris.coords import ( + AncillaryVariable, + AuxCoord, + CellMeasure, + CellMethod, + DimCoord, +) from iris.cube import Cube from ._stock_2d_latlons import ( # noqa @@ -99,7 +105,12 @@ def simple_1d(with_bounds=True): bounds = np.column_stack( [np.arange(11, dtype=np.int32), np.arange(11, dtype=np.int32) + 1] ) - coord = DimCoord(points, long_name="foo", units="1", bounds=bounds) + coord = DimCoord( + points, + long_name="foo", + units="1", + bounds=bounds if with_bounds else None, + ) cube.add_dim_coord(coord, 0) return cube @@ -399,6 +410,35 @@ def simple_2d_w_multidim_and_scalars(): return cube +def simple_2d_w_cell_measure_ancil_var(): + """ + Returns a two dimensional cube with a CellMeasure and AncillaryVariable. 
+ + >>> print(simple_2d_w_cell_measure_ancil_var()) + thingness / (1) (bar: 3; foo: 4) + Dimension coordinates: + bar x - + foo - x + Cell measures: + cell_area x x + Ancillary variables: + quality_flag x - + Scalar coordinates: + wibble 1 + + """ + cube = simple_2d() + cube.add_aux_coord(AuxCoord([1], long_name="wibble"), None) + cube.add_ancillary_variable( + AncillaryVariable([1, 2, 3], standard_name="quality_flag"), 0 + ) + cube.add_cell_measure( + CellMeasure(np.arange(12).reshape(3, 4), standard_name="cell_area"), + (0, 1), + ) + return cube + + def hybrid_height(): """ Returns a two-dimensional (Z, X), hybrid-height cube. diff --git a/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl b/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl index b135546f2d..1e5522854e 100644 --- a/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl +++ b/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl @@ -39,7 +39,7 @@ variables: double time_instant(time_counter) ; time_instant:standard_name = "time" ; time_instant:long_name = "Time axis" ; - time_instant:calendar = "gregorian" ; + time_instant:calendar = "standard" ; time_instant:units = "seconds since 2016-01-01 15:00:00" ; time_instant:time_origin = "2016-01-01 15:00:00" ; time_instant:bounds = "time_instant_bounds" ; diff --git a/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl b/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl index e4f32de7b7..9159bf6e46 100644 --- a/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl +++ b/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl @@ -42,7 +42,7 @@ variables: double time_instant(time_counter) ; time_instant:standard_name = "time" ; time_instant:long_name = "Time axis" ; - time_instant:calendar = "gregorian" ; + time_instant:calendar = "standard" ; time_instant:units = "seconds since 2016-01-01 15:00:00" ; time_instant:time_origin = "2016-01-01 15:00:00" ; time_instant:bounds = "time_instant_bounds" ; diff --git a/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl b/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl index a193dbe451..f79ae0bdaf 100644 --- a/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl +++ b/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl @@ -42,7 +42,7 @@ variables: double time_instant(time_counter) ; time_instant:standard_name = "time" ; time_instant:long_name = "Time axis" ; - time_instant:calendar = "gregorian" ; + time_instant:calendar = "standard" ; time_instant:units = "seconds since 2016-01-01 15:00:00" ; time_instant:time_origin = "2016-01-01 15:00:00" ; time_instant:bounds = "time_instant_bounds" ; diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py index ca15ee1c97..da226a3790 100644 --- a/lib/iris/tests/stock/mesh.py +++ b/lib/iris/tests/stock/mesh.py @@ -61,7 +61,11 @@ def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): units="degrees_east", long_name="long-name", var_name="var-name", - attributes={"a": 1, "b": "c"}, + attributes={ + # N.B. 
cast this so that a save-load roundtrip preserves it + "a": np.int64(1), + "b": "c", + }, ) node_y = AuxCoord(1200 + arr.arange(n_nodes), standard_name="latitude") diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index 030e90a0f3..8a448f7d34 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -8,10 +8,60 @@ from pathlib import Path from string import Template import subprocess +from typing import Optional +import dask +from dask import array as da import netCDF4 import numpy as np +from iris.tests import env_bin_path + +NCGEN_PATHSTR = str(env_bin_path("ncgen")) + + +def ncgen_from_cdl( + cdl_str: Optional[str], cdl_path: Optional[str], nc_path: str +): + """ + Generate a test netcdf file from CDL. + + Source is CDL, supplied in either a string or a file. + A given string is first saved to 'cdl_path' when one is provided, and + otherwise piped directly to ncgen. + A netcdf output file is always created, at the given path. + + Parameters + ---------- + cdl_str : str or None + String containing a CDL description of a netcdf file. + If None, 'cdl_path' must be an existing file. + cdl_path : str or None + Path of temporary text file where cdl_str is written. + If None, 'cdl_str' must be given, and is piped directly to ncgen. + nc_path : str + Path of the temporary netcdf file where the converted result is written. + + Notes + ----- + For legacy reasons, the path args are 'str's not 'Path's. + + """ + if cdl_str and cdl_path: + with open(cdl_path, "w") as f_out: + f_out.write(cdl_str) + if cdl_path: + # Create netcdf from stored CDL file. + call_args = [NCGEN_PATHSTR, cdl_path, "-k3", "-o", nc_path] + call_kwargs = {} + else: + # No CDL file: pipe 'cdl_str' directly into the ncgen program. + if not cdl_str: + raise ValueError("Must provide either 'cdl_str' or 'cdl_path'.") + call_args = [NCGEN_PATHSTR, "-k3", "-o", nc_path] + call_kwargs = dict(input=cdl_str, encoding="ascii") + + subprocess.run(call_args, check=True, **call_kwargs) + def _file_from_cdl_template( temp_file_dir, dataset_name, dataset_type, template_subs @@ -37,12 +87,7 @@ # Spawn an "ncgen" command to create an actual NetCDF file from the # CDL string. - subprocess.run( - ["ncgen", "-o" + str(nc_write_path)], - input=cdl, - encoding="ascii", - check=True, - ) + ncgen_from_cdl(cdl_str=cdl, cdl_path=None, nc_path=nc_write_path) return nc_write_path @@ -58,7 +103,7 @@ def _add_standard_data(nc_path, unlimited_dim_size=0): ds = netCDF4.Dataset(nc_path, "r+") unlimited_dim_names = [ - dim for dim in ds.dimensions if ds.dimensions[dim].size == 0 + dim for dim in ds.dimensions if ds.dimensions[dim].isunlimited() ] # Data addition dependent on this assumption: assert len(unlimited_dim_names) < 2 @@ -79,11 +124,13 @@ # so it can be a dim-coord. data_size = np.prod(shape) data = np.arange(1, data_size + 1, dtype=var.dtype).reshape(shape) + var[:] = data else: # Fill with a plain value. But avoid zeros, so we can simulate + # valid ugrid connectivities even when start_index=1.
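+ # (The dask-based write below lets da.store stream the fill values + # into the file chunk-by-chunk, bounding peak memory at the configured + # chunk size rather than the full variable size.)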
- data = np.ones(shape, dtype=var.dtype) # Do not use zero - var[:] = data + with dask.config.set({"array.chunk-size": "2048MiB"}): + data = da.ones(shape, dtype=var.dtype) # Do not use zero + da.store(data, var) ds.close() diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 36573362dd..745163b485 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -21,7 +21,7 @@ import iris -class SystemInitialTest(tests.IrisTest): +class TestSystemInitial(tests.IrisTest): def test_supported_filetypes(self): nx, ny = 60, 60 data = np.arange(nx * ny, dtype=">f4").reshape(nx, ny) @@ -51,7 +51,7 @@ def horiz_cs(): ) ) hours_since_epoch = cf_units.Unit( - "hours since epoch", cf_units.CALENDAR_GREGORIAN + "hours since epoch", cf_units.CALENDAR_STANDARD ) cm.add_aux_coord( iris.coords.AuxCoord( diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index 4e479f40f7..90bf0e5d4e 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -138,6 +138,101 @@ def setUp(self): self.cube_multi.add_dim_coord(coord_lon.copy(), 1) self.cube_multi.add_dim_coord(coord_lat.copy(), 2) + # + # masked cubes to test handling of masks + # + mask_single = np.vstack( + ( + np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( + 26, axis=0 + ), + np.zeros([10, 3, 3]), + ) + ) + self.cube_single_masked = self.cube_single.copy( + ma.array(self.cube_single.data, mask=mask_single) + ) + mask_multi = np.vstack( + ( + np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( + 16, axis=0 + ), + np.ones([2, 3, 3]), + np.zeros([2, 3, 3]), + ) + ) + self.cube_multi_masked = self.cube_multi.copy( + ma.array(self.cube_multi.data, mask=mask_multi) + ) + + # + # simple cubes for further tests + # + data_easy = np.array( + [[6, 10, 12, 18], [8, 12, 14, 20], [18, 12, 10, 6]], + dtype=np.float32, + ) + self.cube_easy = iris.cube.Cube( + data_easy, long_name="temperature", units="kelvin" + ) + + llcs = iris.coord_systems.GeogCS(6371229) + self.cube_easy.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 0, 10], dtype=np.float32), + "latitude", + units="degrees", + coord_system=llcs, + ), + 0, + ) + self.cube_easy.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 0, 10, 10], dtype=np.float32), + "longitude", + units="degrees", + coord_system=llcs, + ), + 1, + ) + + data_easy_weighted = np.array( + [[3, 5, 7, 9], [0, 2, 4, 6]], + dtype=np.float32, + ) + self.cube_easy_weighted = iris.cube.Cube( + data_easy_weighted, long_name="temperature", units="kelvin" + ) + llcs = iris.coord_systems.GeogCS(6371229) + self.cube_easy_weighted.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 10], dtype=np.float32), + "latitude", + units="degrees", + coord_system=llcs, + ), + 0, + ) + self.cube_easy_weighted.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 0, 10, 10], dtype=np.float32), + "longitude", + units="degrees", + coord_system=llcs, + ), + 1, + ) + + # + # weights for weighted aggregate-by + # + self.weights_single = np.ones_like(z_points, dtype=np.float64) + self.weights_single[2] = 0.0 + self.weights_single[4:6] = 0.0 + + self.weights_multi = np.ones_like(z1_points, dtype=np.float64) + self.weights_multi[1:4] = 0.0 + # # expected data results # @@ -166,6 +261,31 @@ def setUp(self): ], dtype=np.float64, ) + self.weighted_single_expected = np.array( + [ + [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], + [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]], + [[3.0, 6.0, 9.0], [12.0, 15.0, 18.0], [21.0, 24.0, 27.0]], + 
[[7.5, 15.0, 22.5], [30.0, 37.5, 45.0], [52.5, 60.0, 67.5]], + [[12.0, 24.0, 36.0], [48.0, 60.0, 72.0], [84.0, 96.0, 108.0]], + [ + [17.5, 35.0, 52.5], + [70.0, 87.5, 105.0], + [122.5, 140.0, 157.5], + ], + [ + [24.0, 48.0, 72.0], + [96.0, 120.0, 144.0], + [168.0, 192.0, 216.0], + ], + [ + [31.5, 63.0, 94.5], + [126.0, 157.5, 189.0], + [220.5, 252.0, 283.5], + ], + ], + dtype=np.float64, + ) row1 = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]] row2 = [ @@ -229,6 +349,28 @@ def setUp(self): ], dtype=np.float64, ) + self.weighted_multi_expected = np.array( + [ + [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], + [[4.0, 8.0, 12.0], [16.0, 20.0, 24.0], [28.0, 32.0, 36.0]], + [[14.0, 28.0, 42.0], [56.0, 70.0, 84.0], [98.0, 112.0, 126.0]], + [[7.0, 14.0, 21.0], [28.0, 35.0, 42.0], [49.0, 56.0, 63.0]], + [[9.0, 18.0, 27.0], [36.0, 45.0, 54.0], [63.0, 72.0, 81.0]], + [[10.5, 21.0, 31.5], [42.0, 52.5, 63.0], [73.5, 84.0, 94.5]], + [[13.0, 26.0, 39.0], [52.0, 65.0, 78.0], [91.0, 104.0, 117.0]], + [ + [15.0, 30.0, 45.0], + [60.0, 75.0, 90.0], + [105.0, 120.0, 135.0], + ], + [ + [16.5, 33.0, 49.5], + [66.0, 82.5, 99.0], + [115.5, 132.0, 148.5], + ], + ], + dtype=np.float64, + ) def test_single(self): # mean group-by with single coordinate name. @@ -271,6 +413,34 @@ def test_single(self): aggregateby_cube.data, self.single_rms_expected ) + def test_weighted_single(self): + # weighted mean group-by with single coordinate name. + aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single.cml"), + ) + + # weighted mean group-by with single coordinate. + aggregateby_cube = self.cube_single.aggregated_by( + self.coord_z_single, + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_single_expected, + ) + def test_single_shared(self): z2_points = np.arange(36, dtype=np.int32) coord_z2 = iris.coords.AuxCoord( @@ -300,6 +470,38 @@ def test_single_shared(self): aggregateby_cube.data, self.single_expected ) + def test_weighted_single_shared(self): + z2_points = np.arange(36, dtype=np.int32) + coord_z2 = iris.coords.AuxCoord( + z2_points, long_name="wibble", units="1" + ) + self.cube_single.add_aux_coord(coord_z2, 0) + + # weighted group-by with single coordinate name on shared axis. + aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single_shared.cml"), + ) + + # weighted group-by with single coordinate on shared axis. 
+ aggregateby_cube = self.cube_single.aggregated_by( + self.coord_z_single, + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single_shared.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, self.weighted_single_expected + ) + def test_single_shared_circular(self): points = np.arange(36) * 10.0 circ_coord = iris.coords.DimCoord( @@ -329,6 +531,48 @@ def test_single_shared_circular(self): aggregateby_cube.data, self.single_expected ) + def test_weighted_single_shared_circular(self): + points = np.arange(36) * 10.0 + circ_coord = iris.coords.DimCoord( + points, long_name="circ_height", units="degrees", circular=True + ) + self.cube_single.add_aux_coord(circ_coord, 0) + + # weighted group-by with single coordinate name on shared axis. + aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ( + "analysis", + "aggregated_by", + "weighted_single_shared_circular.cml", + ), + ) + + # weighted group-by with single coordinate on shared axis. + coord = self.cube_single.coords("height") + aggregateby_cube = self.cube_single.aggregated_by( + coord, + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ( + "analysis", + "aggregated_by", + "weighted_single_shared_circular.cml", + ), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_single_expected, + ) + def test_multi(self): # group-by with multiple coordinate names. aggregateby_cube = self.cube_multi.aggregated_by( @@ -366,6 +610,55 @@ def test_multi(self): aggregateby_cube.data, self.multi_expected ) + def test_weighted_multi(self): + # weighted group-by with multiple coordinate names. + aggregateby_cube = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + # weighted group-by with multiple coordinate names (different order). + aggregateby_cube = self.cube_multi.aggregated_by( + ["level", "height"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + # weighted group-by with multiple coordinates. + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z1_multi, self.coord_z2_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + # weighted group-by with multiple coordinates (different order). 
+ aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z2_multi, self.coord_z1_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_multi_expected, + ) + def test_multi_shared(self): z3_points = np.arange(20, dtype=np.int32) coord_z3 = iris.coords.AuxCoord( @@ -416,50 +709,91 @@ def test_multi_shared(self): aggregateby_cube.data, self.multi_expected ) - def test_easy(self): - data = np.array( - [[6, 10, 12, 18], [8, 12, 14, 20], [18, 12, 10, 6]], - dtype=np.float32, + def test_weighted_multi_shared(self): + z3_points = np.arange(20, dtype=np.int32) + coord_z3 = iris.coords.AuxCoord( + z3_points, long_name="sigma", units="1" + ) + z4_points = np.arange(19, -1, -1, dtype=np.int32) + coord_z4 = iris.coords.AuxCoord( + z4_points, long_name="gamma", units="1" ) - cube = iris.cube.Cube(data, long_name="temperature", units="kelvin") - llcs = iris.coord_systems.GeogCS(6371229) - cube.add_aux_coord( - iris.coords.AuxCoord( - np.array([0, 0, 10], dtype=np.float32), - "latitude", - units="degrees", - coord_system=llcs, - ), - 0, + self.cube_multi.add_aux_coord(coord_z3, 0) + self.cube_multi.add_aux_coord(coord_z4, 0) + + # weighted group-by with multiple coordinate names on shared axis. + aggregateby_cube = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + weights=self.weights_multi, ) - cube.add_aux_coord( - iris.coords.AuxCoord( - np.array([0, 0, 10, 10], dtype=np.float32), - "longitude", - units="degrees", - coord_system=llcs, - ), - 1, + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), ) + # weighted group-by with multiple coordinate names on shared axis + # (different order). + aggregateby_cube = self.cube_multi.aggregated_by( + ["level", "height"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), + ) + + # weighted group-by with multiple coordinates on shared axis. + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z1_multi, self.coord_z2_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), + ) + + # weighted group-by with multiple coordinates on shared axis (different + # order). + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z2_multi, self.coord_z1_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_multi_expected, + ) + + def test_easy(self): # # Easy mean aggregate test by each coordinate. 
# - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.MEAN) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.MEAN + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( [[8.0, 15.0], [10.0, 17.0], [15.0, 8.0]], dtype=np.float32 ), ) + self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "easy.cml"), - checksum=False, ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.MEAN) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.MEAN + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -471,7 +805,9 @@ def test_easy(self): # # Easy max aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.MAX) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.MAX + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -479,7 +815,9 @@ def test_easy(self): ), ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.MAX) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.MAX + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -491,7 +829,9 @@ def test_easy(self): # # Easy sum aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.SUM) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.SUM + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -499,7 +839,9 @@ def test_easy(self): ), ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.SUM) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.SUM + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -511,7 +853,7 @@ def test_easy(self): # # Easy percentile aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by( + aggregateby_cube = self.cube_easy.aggregated_by( "longitude", iris.analysis.PERCENTILE, percent=25 ) np.testing.assert_almost_equal( @@ -521,7 +863,7 @@ def test_easy(self): ), ) - aggregateby_cube = cube.aggregated_by( + aggregateby_cube = self.cube_easy.aggregated_by( "latitude", iris.analysis.PERCENTILE, percent=25 ) np.testing.assert_almost_equal( @@ -535,7 +877,9 @@ def test_easy(self): # # Easy root mean square aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.RMS) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.RMS + ) row = [ list(np.sqrt([68.0, 234.0])), list(np.sqrt([104.0, 298.0])), @@ -545,7 +889,9 @@ def test_easy(self): aggregateby_cube.data, np.array(row, dtype=np.float32) ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.RMS) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.RMS + ) row = [ list(np.sqrt([50.0, 122.0, 170.0, 362.0])), [18.0, 12.0, 10.0, 6.0], @@ -554,17 +900,109 @@ def test_easy(self): aggregateby_cube.data, np.array(row, dtype=np.float32) ) + def test_weighted_easy(self): + # Use different weights for lat and lon to avoid division by zero. + lon_weights = np.array( + [[1, 0, 1, 1], [9, 1, 2, 0]], + dtype=np.float32, + ) + lat_weights = np.array([2.0, 2.0]) + + # + # Easy weighted mean aggregate test by each coordinate. 
+ # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", iris.analysis.MEAN, weights=lon_weights + ) + + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array([[3.0, 8.0], [0.2, 4.0]], dtype=np.float32), + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_easy.cml"), + ) + + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "latitude", + iris.analysis.MEAN, + weights=lat_weights, + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[3.0, 5.0, 7.0, 9.0], [0.0, 2.0, 4.0, 6.0]], + dtype=np.float32, + ), + ) + + # + # Easy weighted sum aggregate test by each coordinate. + # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", iris.analysis.SUM, weights=lon_weights + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array([[3.0, 16.0], [2.0, 8.0]], dtype=np.float32), + ) + + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "latitude", + iris.analysis.SUM, + weights=lat_weights, + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[6.0, 10.0, 14.0, 18.0], [0.0, 4.0, 8.0, 12.0]], + dtype=np.float32, + ), + ) + + # + # Easy weighted percentile aggregate test for longitude. + # Note: Not possible for latitude since at least two values for each + # category are necessary. + # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", + iris.analysis.WPERCENTILE, + percent=50, + weights=lon_weights, + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array([[3.0, 8.0], [0.2, 4.0]], dtype=np.float32), + ) + + # + # Easy weighted root mean square aggregate test by each coordinate. + # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", iris.analysis.RMS, weights=lon_weights + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[3.0, np.sqrt(65.0)], [np.sqrt(0.4), 4.0]], dtype=np.float32 + ), + ) + + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "latitude", iris.analysis.RMS, weights=lat_weights + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[3.0, 5.0, 7.0, 9.0], [0.0, 2.0, 4.0, 6.0]], + dtype=np.float32, + ), + ) + def test_single_missing(self): # aggregation correctly handles masked data - mask = np.vstack( - ( - np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( - 26, axis=0 - ), - np.zeros([10, 3, 3]), - ) - ) - self.cube_single.data = ma.array(self.cube_single.data, mask=mask) single_expected = ma.masked_invalid( [ [ @@ -609,30 +1047,81 @@ def test_single_missing(self): ], ] ) - aggregateby_cube = self.cube_single.aggregated_by( + aggregateby_cube = self.cube_single_masked.aggregated_by( "height", iris.analysis.MEAN ) + self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "single_missing.cml"), - checksum=False, ) self.assertMaskedArrayAlmostEqual( aggregateby_cube.data, single_expected ) + def test_weighted_single_missing(self): + # weighted aggregation correctly handles masked data + weighted_single_expected = ma.masked_invalid( + [ + [ + [0.0, np.nan, 0.0], + [np.nan, 0.0, np.nan], + [0.0, np.nan, 0.0], + ], + [ + [1.0, np.nan, 3.0], + [np.nan, 5.0, np.nan], + [7.0, np.nan, 9.0], + ], + [ + [3.0, np.nan, 9.0], + [np.nan, 15.0, np.nan], + [21.0, np.nan, 27.0], + ], + [ + [7.5, np.nan, 22.5], + [np.nan, 37.5, np.nan], + [52.5, np.nan, 67.5], + ], + [ + [12.0, np.nan, 36.0], + [np.nan, 60.0, np.nan], + [84.0, np.nan, 108.0], + ], + [ + [17.5, np.nan, 52.5], + [np.nan, 87.5, np.nan], + 
[122.5, np.nan, 157.5], + ], + [ + [24.0, 53.0, 72.0], + [106.0, 120.0, 159.0], + [168.0, 212.0, 216.0], + ], + [ + [31.5, 63.0, 94.5], + [126.0, 157.5, 189.0], + [220.5, 252.0, 283.5], + ], + ] + ) + aggregateby_cube = self.cube_single_masked.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single_missing.cml"), + ) + self.assertMaskedArrayAlmostEqual( + aggregateby_cube.data, + weighted_single_expected, + ) + def test_multi_missing(self): # aggregation correctly handles masked data - mask = np.vstack( - ( - np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( - 16, axis=0 - ), - np.ones([2, 3, 3]), - np.zeros([2, 3, 3]), - ) - ) - self.cube_multi.data = ma.array(self.cube_multi.data, mask=mask) multi_expected = ma.masked_invalid( [ [ @@ -682,32 +1171,160 @@ def test_multi_missing(self): ], ] ) - aggregateby_cube = self.cube_multi.aggregated_by( + aggregateby_cube = self.cube_multi_masked.aggregated_by( ["height", "level"], iris.analysis.MEAN ) + self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "multi_missing.cml"), - checksum=False, ) self.assertMaskedArrayAlmostEqual( aggregateby_cube.data, multi_expected ) - def test_returned_weights(self): + def test_weighted_multi_missing(self): + # weighted aggregation correctly handles masked data + weighted_multi_expected = ma.masked_invalid( + [ + [ + [0.0, np.nan, 0.0], + [np.nan, 0.0, np.nan], + [0.0, np.nan, 0.0], + ], + [ + [4.0, np.nan, 12.0], + [np.nan, 20.0, np.nan], + [28.0, np.nan, 36.0], + ], + [ + [14.0, 37.0, 42.0], + [74.0, 70.0, 111.0], + [98.0, 148.0, 126.0], + ], + [ + [7.0, np.nan, 21.0], + [np.nan, 35.0, np.nan], + [49.0, np.nan, 63.0], + ], + [ + [9.0, np.nan, 27.0], + [np.nan, 45.0, np.nan], + [63.0, np.nan, 81.0], + ], + [ + [10.5, np.nan, 31.5], + [np.nan, 52.5, np.nan], + [73.5, np.nan, 94.5], + ], + [ + [13.0, np.nan, 39.0], + [np.nan, 65.0, np.nan], + [91.0, np.nan, 117.0], + ], + [ + [15.0, np.nan, 45.0], + [np.nan, 75.0, np.nan], + [105.0, np.nan, 135.0], + ], + [ + [np.nan, np.nan, np.nan], + [np.nan, np.nan, np.nan], + [np.nan, np.nan, np.nan], + ], + ] + ) + aggregateby_cube = self.cube_multi_masked.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_missing.cml"), + ) + self.assertMaskedArrayAlmostEqual( + aggregateby_cube.data, + weighted_multi_expected, + ) + + def test_returned_true_single(self): + aggregateby_output = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + returned=True, + weights=self.weights_single, + ) + self.assertTrue(isinstance(aggregateby_output, tuple)) + + aggregateby_cube = aggregateby_output[0] + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single.cml"), + ) + + aggregateby_weights = aggregateby_output[1] + expected_weights = np.array( + [ + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[4.0, 4.0, 4.0], [4.0, 4.0, 4.0], [4.0, 4.0, 4.0]], + [[5.0, 5.0, 5.0], [5.0, 5.0, 5.0], [5.0, 5.0, 5.0]], + [[6.0, 6.0, 6.0], [6.0, 6.0, 6.0], [6.0, 6.0, 6.0]], + [[7.0, 7.0, 7.0], [7.0, 7.0, 7.0], [7.0, 7.0, 7.0]], + [[8.0, 8.0, 8.0], [8.0, 8.0, 8.0], [8.0, 8.0, 8.0]], + ] + ) + np.testing.assert_almost_equal(aggregateby_weights, expected_weights) + + def 
test_returned_true_multi(self): + aggregateby_output = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + returned=True, + weights=self.weights_multi, + ) + self.assertTrue(isinstance(aggregateby_output, tuple)) + + aggregateby_cube = aggregateby_output[0] + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + aggregateby_weights = aggregateby_output[1] + expected_weights = np.array( + [ + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], + [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], + [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], + ] + ) + np.testing.assert_almost_equal(aggregateby_weights, expected_weights) + + def test_returned_fails_with_non_weighted_aggregator(self): self.assertRaises( - ValueError, + TypeError, self.cube_single.aggregated_by, "height", - iris.analysis.MEAN, + iris.analysis.MAX, returned=True, ) + + def test_weights_fail_with_non_weighted_aggregator(self): self.assertRaises( - ValueError, + TypeError, self.cube_single.aggregated_by, "height", - iris.analysis.MEAN, - weights=[1, 2, 3, 4, 5], + iris.analysis.MAX, + weights=self.weights_single, ) diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index d5a810d2fa..e0a5d0971e 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -9,6 +9,7 @@ import iris.tests as tests # isort:skip import cf_units +import dask.array as da import numpy as np import numpy.ma as ma @@ -19,6 +20,7 @@ import iris.coords import iris.cube import iris.tests.stock +import iris.util class TestAnalysisCubeCoordComparison(tests.IrisTest): @@ -931,6 +933,106 @@ def test_count_2d(self): gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False ) + def test_max_run_1d(self): + cube = tests.stock.simple_1d() + # [ 0 1 2 3 4 5 6 7 8 9 10] + result = cube.collapsed( + "foo", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 8, 9]), + ) + self.assertArrayEqual(result.data, np.array(3)) + self.assertEqual(result.units, 1) + self.assertTupleEqual(result.cell_methods, ()) + self.assertCML( + result, ("analysis", "max_run_foo_1d.cml"), checksum=False + ) + + def test_max_run_lazy(self): + cube = tests.stock.simple_1d() + # [ 0 1 2 3 4 5 6 7 8 9 10] + # Make data lazy + cube.data = da.from_array(cube.data) + result = cube.collapsed( + "foo", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 8, 9]), + ) + self.assertTrue(result.has_lazy_data()) + # Realise data + _ = result.data + self.assertArrayEqual(result.data, np.array(3)) + self.assertEqual(result.units, 1) + self.assertTupleEqual(result.cell_methods, ()) + self.assertCML( + result, ("analysis", "max_run_foo_1d.cml"), checksum=False + ) + + def test_max_run_2d(self): + cube = tests.stock.simple_2d() + # [[ 0 1 2 3] + # [ 4 5 6 7] + # [ 8 9 10 11]] + foo_result = cube.collapsed( + "foo", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), + ) + self.assertArrayEqual( + foo_result.data, np.array([1, 2, 1], dtype=np.float32) + ) + self.assertEqual(foo_result.units, 1) + self.assertTupleEqual(foo_result.cell_methods, ()) + self.assertCML( + foo_result, 
("analysis", "max_run_foo_2d.cml"), checksum=False + ) + + bar_result = cube.collapsed( + "bar", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), + ) + self.assertArrayEqual( + bar_result.data, np.array([2, 2, 0, 3], dtype=np.float32) + ) + self.assertEqual(bar_result.units, 1) + self.assertTupleEqual(bar_result.cell_methods, ()) + self.assertCML( + bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False + ) + + with self.assertRaises(ValueError): + _ = cube.collapsed( + ("foo", "bar"), + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), + ) + + def test_max_run_masked(self): + cube = tests.stock.simple_2d() + # [[ 0 1 2 3] + # [ 4 5 6 7] + # [ 8 9 10 11]] + iris.util.mask_cube( + cube, np.isin(cube.data, [0, 2, 3, 5, 7, 11]), in_place=True + ) + # [[-- 1 -- --] + # [ 4 -- 6 --] + # [ 8 9 10 --]] + result = cube.collapsed( + "bar", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 9, 10, 11]), + ) + self.assertArrayEqual( + result.data, np.array([1, 1, 2, 0], dtype=np.float32) + ) + self.assertEqual(result.units, 1) + self.assertTupleEqual(result.cell_methods, ()) + self.assertCML( + result, ("analysis", "max_run_bar_2d_masked.cml"), checksum=False + ) + def test_weighted_sum_consistency(self): # weighted sum with unit weights should be the same as a sum cube = tests.stock.simple_1d() @@ -1562,5 +1664,43 @@ def test_mean_with_weights(self): self.assertArrayAlmostEqual(expected_result, res_cube.data) +class TestCreateWeightedAggregatorFn(tests.IrisTest): + @staticmethod + def aggregator_fn(data, axis, **kwargs): + return (data, axis, kwargs) + + def test_no_weights_supplied(self): + aggregator_fn = iris.analysis.create_weighted_aggregator_fn( + self.aggregator_fn, 42, test_kwarg="test" + ) + output = aggregator_fn("dummy_array", None) + self.assertEqual(len(output), 3) + self.assertEqual(output[0], "dummy_array") + self.assertEqual(output[1], 42) + self.assertEqual(output[2], {"test_kwarg": "test"}) + + def test_weights_supplied(self): + aggregator_fn = iris.analysis.create_weighted_aggregator_fn( + self.aggregator_fn, 42, test_kwarg="test" + ) + output = aggregator_fn("dummy_array", "w") + self.assertEqual(len(output), 3) + self.assertEqual(output[0], "dummy_array") + self.assertEqual(output[1], 42) + self.assertEqual(output[2], {"test_kwarg": "test", "weights": "w"}) + + def test_weights_in_kwargs(self): + kwargs = {"test_kwarg": "test", "weights": "ignored"} + aggregator_fn = iris.analysis.create_weighted_aggregator_fn( + self.aggregator_fn, 42, **kwargs + ) + output = aggregator_fn("dummy_array", "w") + self.assertEqual(len(output), 3) + self.assertEqual(output[0], "dummy_array") + self.assertEqual(output[1], 42) + self.assertEqual(output[2], {"test_kwarg": "test", "weights": "w"}) + self.assertEqual(kwargs, {"test_kwarg": "test", "weights": "ignored"}) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index e753adbae8..6c08dc1f9e 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -249,7 +249,7 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) self.assertCMLApproxData(a, ("analysis", "apply_ufunc_original.cml")) @@ -259,14 +259,14 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=True, ) 
self.assertCMLApproxData(b, ("analysis", "apply_ufunc.cml")) self.assertCMLApproxData(a, ("analysis", "apply_ufunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -295,7 +295,7 @@ def test_apply_ufunc_fail(self): def test_ifunc(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) b = my_ifunc(a, new_name="squared temperature", in_place=False) self.assertCMLApproxData(a, ("analysis", "apply_ifunc_original.cml")) @@ -307,7 +307,7 @@ def test_ifunc(self): self.assertCMLApproxData(a, ("analysis", "apply_ifunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -347,7 +347,7 @@ def test_ifunc_init_fail(self): def test_ifunc_call_fail(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) # should now NOT fail because giving 2 arguments to an ifunc that # expects only one will now ignore the surplus argument and raise @@ -367,7 +367,7 @@ def test_ifunc_call_fail(self): my_ifunc(a) my_ifunc = iris.analysis.maths.IFunc( - lambda a: (a, a ** 2.0), lambda cube: cf_units.Unit("1") + lambda a: (a, a**2.0), lambda cube: cf_units.Unit("1") ) # should fail because data function returns a tuple @@ -553,9 +553,9 @@ def test_square_root(self): a.data = abs(a.data) a.units **= 2 - e = a ** 0.5 + e = a**0.5 - self.assertArrayAllClose(e.data, a.data ** 0.5) + self.assertArrayAllClose(e.data, a.data**0.5) self.assertCML(e, ("analysis", "sqrt.cml"), checksum=False) self.assertRaises(ValueError, iris.analysis.maths.exponentiate, a, 0.3) @@ -585,26 +585,26 @@ def test_apply_ufunc(self): np.square, a, new_name="more_thingness", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) b = iris.analysis.maths.apply_ufunc(vec_mag_ufunc, a, c) - ans = a.data ** 2 + c.data ** 2 - b2 = b ** 2 + ans = a.data**2 + c.data**2 + b2 = b**2 self.assertArrayAlmostEqual(b2.data, ans) @@ -617,17 +617,17 @@ def test_ifunc(self): a = self.cube a.units = cf_units.Unit("meters") - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units**2) b = my_ifunc(a, new_name="more_thingness", in_place=False) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -637,12 +637,12 @@ def vec_mag(u, v): ) b = my_ifunc(a, c) - ans = (a.data ** 2 + c.data ** 2) ** 0.5 + ans = (a.data**2 + c.data**2) ** 0.5 self.assertArrayAlmostEqual(b.data, ans) def vec_mag_data_func(u_data, v_data): - return np.sqrt(u_data ** 2 + v_data ** 2) + return np.sqrt(u_data**2 + v_data**2) vec_mag_ifunc = iris.analysis.maths.IFunc( vec_mag_data_func, lambda a, b: (a + b).units @@ -687,12 +687,12 @@ def setUp(self): self.data_1u = np.array([[9, 9, 9], [8, 8, 8]], dtype=np.uint64) self.data_2u = np.array([[3, 3, 
3], [2, 2, 2]], dtype=np.uint64) - self.cube_1f = Cube(self.data_1f) - self.cube_2f = Cube(self.data_2f) - self.cube_1i = Cube(self.data_1i) - self.cube_2i = Cube(self.data_2i) - self.cube_1u = Cube(self.data_1u) - self.cube_2u = Cube(self.data_2u) + self.cube_1f = Cube(self.data_1f.copy()) + self.cube_2f = Cube(self.data_2f.copy()) + self.cube_1i = Cube(self.data_1i.copy()) + self.cube_2i = Cube(self.data_2i.copy()) + self.cube_1u = Cube(self.data_1u.copy()) + self.cube_2u = Cube(self.data_2u.copy()) self.ops = (operator.add, operator.sub, operator.mul, operator.truediv) self.iops = ( diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index 64a7924ce4..0615dc39bf 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -692,7 +692,7 @@ def test_cube_iteration(self): pass def test_not_iterable(self): - self.assertFalse(isinstance(self.t, collections.Iterable)) + self.assertFalse(isinstance(self.t, collections.abc.Iterable)) class Test2dSlicing(TestCube2d): diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index 89fa2d20c6..034fb1dbda 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -11,6 +11,8 @@ # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip +import contextlib +import io from unittest import mock import iris @@ -267,6 +269,34 @@ def test_variable_attribute_touch_pass_0(self): ), ) + def test_destructor(self): + """Test the destructor when reading the dataset fails. + Related to issue #3312: previously, the `CFReader` would + always call `close()` on its `_dataset` attribute, even if it + didn't exist because opening the dataset had failed. + """ + with self.temp_filename(suffix=".nc") as fn: + + with open(fn, "wb+") as fh: + + fh.write( + b"\x89HDF\r\n\x1a\nBroken file with correct signature" + ) + fh.flush() + + with io.StringIO() as buf: + with contextlib.redirect_stderr(buf): + try: + _ = cf.CFReader(fn) + except OSError: + pass + try: + _ = iris.load_cubes(fn) + except OSError: + pass + buf.seek(0) + self.assertMultiLineEqual("", buf.read()) + @tests.skip_data class TestLoad(tests.IrisTest): diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index 4f9e48fb83..1972cdeb90 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -91,7 +91,7 @@ def test_cell_different_bounds(self): self.assertEqual(len(sub_list), 0) -class TestMixin: +class ConstraintMixin: """ Mix-in class for attributes & utilities common to the "normal" and "strict" test cases. @@ -134,7 +134,7 @@ def setUp(self): self.lat_gt_45 = iris.Constraint(latitude=lambda c: c > 45) -class RelaxedConstraintMixin(TestMixin): +class RelaxedConstraintMixin(ConstraintMixin): @staticmethod def fixup_sigma_to_be_aux(cubes): # XXX Fix the cubes such that the sigma coordinate is always an AuxCoord. Pending gh issue #18 @@ -296,11 +296,11 @@ def load_match(self, files, constraints): @tests.skip_data -class TestCubeExtract__names(TestMixin, tests.IrisTest): +class TestCubeExtract__names(ConstraintMixin, tests.IrisTest): def setUp(self): fname = iris.sample_data_path("atlantic_profiles.nc") self.cubes = iris.load(fname) - TestMixin.setUp(self) + ConstraintMixin.setUp(self) cube = iris.load_cube(self.theta_path) # Expected names... 
self.standard_name = "air_potential_temperature" @@ -353,11 +353,11 @@ def test_unknown(self): @tests.skip_data -class TestCubeExtract__name_constraint(TestMixin, tests.IrisTest): +class TestCubeExtract__name_constraint(ConstraintMixin, tests.IrisTest): def setUp(self): fname = iris.sample_data_path("atlantic_profiles.nc") self.cubes = iris.load(fname) - TestMixin.setUp(self) + ConstraintMixin.setUp(self) cube = iris.load_cube(self.theta_path) # Expected names... self.standard_name = "air_potential_temperature" @@ -579,9 +579,9 @@ def test_unknown(self): @tests.skip_data -class TestCubeExtract(TestMixin, tests.IrisTest): +class TestCubeExtract(ConstraintMixin, tests.IrisTest): def setUp(self): - TestMixin.setUp(self) + ConstraintMixin.setUp(self) self.cube = iris.load_cube(self.theta_path) def test_attribute_constraint(self): @@ -644,7 +644,7 @@ def test_non_existent_coordinate(self): @tests.skip_data -class TestConstraints(TestMixin, tests.IrisTest): +class TestConstraints(ConstraintMixin, tests.IrisTest): def test_constraint_expressions(self): rt = repr(self.theta) rl10 = repr(self.level_10) diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 87270b524c..ea99ae06df 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -49,7 +49,11 @@ def test_slice_multiple_indices(self): def test_slice_reverse(self): b = self.lat[::-1] np.testing.assert_array_equal(b.points, self.lat.points[::-1]) - np.testing.assert_array_equal(b.bounds, self.lat.bounds[::-1, :]) + np.testing.assert_array_equal(b.bounds, self.lat.bounds[::-1, ::-1]) + + # Check contiguity is preserved. + self.assertTrue(self.lat.is_contiguous()) + self.assertTrue(b.is_contiguous()) c = b[::-1] self.assertEqual(self.lat, c) diff --git a/lib/iris/tests/test_coord_categorisation.py b/lib/iris/tests/test_coord_categorisation.py index 616da882f5..0206ba66a5 100644 --- a/lib/iris/tests/test_coord_categorisation.py +++ b/lib/iris/tests/test_coord_categorisation.py @@ -52,7 +52,7 @@ def setUp(self): time_coord = iris.coords.DimCoord( day_numbers, standard_name="time", - units=cf_units.Unit("days since epoch", "gregorian"), + units=cf_units.Unit("days since epoch", "standard"), ) cube.add_dim_coord(time_coord, 0) diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 4229125969..7cd15297cc 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -14,7 +14,6 @@ GeogCS, LambertConformal, RotatedGeogCS, - Stereographic, TransverseMercator, ) import iris.coords @@ -33,16 +32,6 @@ def osgb(): ) -def stereo(): - return Stereographic( - central_lat=-90, - central_lon=-45, - false_easting=100, - false_northing=200, - ellipsoid=GeogCS(6377563.396, 6356256.909), - ) - - class TestCoordSystemLookup(tests.IrisTest): def setUp(self): self.cube = iris.tests.stock.lat_lon_cube() @@ -87,7 +76,7 @@ def test_simple(self): def test_different_class(self): a = self.cs1 b = self.cs3 - self.assertNotEquals(a, b) + self.assertNotEqual(a, b) def test_different_public_attributes(self): a = self.cs1 @@ -98,7 +87,7 @@ def test_different_public_attributes(self): self.assertEqual(a.foo, "a") # a and b should not be the same - self.assertNotEquals(a, b) + self.assertNotEqual(a, b) # a and b should be the same b.foo = "a" @@ -106,7 +95,7 @@ def test_different_public_attributes(self): b.foo = "b" # a and b should not be the same - self.assertNotEquals(a, b) + self.assertNotEqual(a, b) class Test_CoordSystem_xml_element(tests.IrisTest): @@ 
-216,12 +205,152 @@ def test_as_cartopy_crs(self):
         cs = GeogCS(6543210, 6500000)
         res = cs.as_cartopy_crs()
         globe = ccrs.Globe(
-            semimajor_axis=6543210.0, semiminor_axis=6500000.0, ellipse=None
+            semimajor_axis=6543210.0,
+            semiminor_axis=6500000.0,
+            ellipse=None,
         )
         expected = ccrs.Geodetic(globe)
         self.assertEqual(res, expected)
 
 
+class Test_GeogCS_equality(tests.IrisTest):
+    """Test that cached values don't break GeogCS equality."""
+
+    def test_as_cartopy_globe(self):
+        cs_const = GeogCS(6543210, 6500000)
+        cs_mut = GeogCS(6543210, 6500000)
+        initial_globe = cs_mut.as_cartopy_globe()
+        new_globe = cs_mut.as_cartopy_globe()
+
+        self.assertIs(new_globe, initial_globe)
+        self.assertEqual(cs_const, cs_mut)
+
+    def test_as_cartopy_projection(self):
+        cs_const = GeogCS(6543210, 6500000)
+        cs_mut = GeogCS(6543210, 6500000)
+        initial_projection = cs_mut.as_cartopy_projection()
+        initial_globe = initial_projection.globe
+        new_projection = cs_mut.as_cartopy_projection()
+        new_globe = new_projection.globe
+
+        self.assertIs(new_globe, initial_globe)
+        self.assertEqual(cs_const, cs_mut)
+
+    def test_as_cartopy_crs(self):
+        cs_const = GeogCS(6543210, 6500000)
+        cs_mut = GeogCS(6543210, 6500000)
+        initial_crs = cs_mut.as_cartopy_crs()
+        initial_globe = initial_crs.globe
+        new_crs = cs_mut.as_cartopy_crs()
+        new_globe = new_crs.globe
+
+        self.assertIs(new_crs, initial_crs)
+        self.assertIs(new_globe, initial_globe)
+        self.assertEqual(cs_const, cs_mut)
+
+    def test_update_to_equivalent(self):
+        cs_const = GeogCS(6500000, 6000000)
+        # Cause caching
+        _ = cs_const.as_cartopy_crs()
+
+        cs_mut = GeogCS(6543210, 6000000)
+        # Cause caching
+        _ = cs_mut.as_cartopy_crs()
+        # Set value
+        cs_mut.semi_major_axis = 6500000
+        cs_mut.inverse_flattening = 13
+
+        self.assertEqual(cs_const.semi_major_axis, 6500000)
+        self.assertEqual(cs_mut.semi_major_axis, 6500000)
+        self.assertEqual(cs_const, cs_mut)
+
+
+class Test_GeogCS_mutation(tests.IrisTest):
+    """Test that altering attributes of a GeogCS instance behaves as expected."""
+
+    def test_semi_major_axis_change(self):
+        # Clear datum
+        # Clear caches
+        cs = GeogCS.from_datum("OSGB 1936")
+        _ = cs.as_cartopy_crs()
+        self.assertEqual(cs.datum, "OSGB 1936")
+        cs.semi_major_axis = 6000000
+        self.assertIsNone(cs.datum)
+        self.assertEqual(cs.as_cartopy_globe().semimajor_axis, 6000000)
+
+    def test_semi_major_axis_no_change(self):
+        # Datum untouched
+        # Caches untouched
+        cs = GeogCS.from_datum("OSGB 1936")
+        initial_crs = cs.as_cartopy_crs()
+        self.assertEqual(cs.datum, "OSGB 1936")
+        cs.semi_major_axis = 6377563.396
+        self.assertEqual(cs.datum, "OSGB 1936")
+        new_crs = cs.as_cartopy_crs()
+        self.assertIs(new_crs, initial_crs)
+
+    def test_semi_minor_axis_change(self):
+        # Clear datum
+        # Clear caches
+        cs = GeogCS.from_datum("OSGB 1936")
+        _ = cs.as_cartopy_crs()
+        self.assertEqual(cs.datum, "OSGB 1936")
+        cs.semi_minor_axis = 6000000
+        self.assertIsNone(cs.datum)
+        self.assertEqual(cs.as_cartopy_globe().semiminor_axis, 6000000)
+
+    def test_semi_minor_axis_no_change(self):
+        # Datum untouched
+        # Caches untouched
+        cs = GeogCS.from_datum("OSGB 1936")
+        initial_crs = cs.as_cartopy_crs()
+        self.assertEqual(cs.datum, "OSGB 1936")
+        cs.semi_minor_axis = 6356256.909237285
+        self.assertEqual(cs.datum, "OSGB 1936")
+        new_crs = cs.as_cartopy_crs()
+        self.assertIs(new_crs, initial_crs)
+
+    def test_datum_change(self):
+        # Semi-major axis changes
+        # All internal ellipsoid values set to None
+        # CRS changes
+        cs = GeogCS(6543210, 6500000)
+        _ = cs.as_cartopy_crs()
+        self.assertTrue("_globe" in
cs.__dict__) + self.assertTrue("_crs" in cs.__dict__) + self.assertEqual(cs.semi_major_axis, 6543210) + cs.datum = "OSGB 1936" + self.assertEqual(cs.as_cartopy_crs().datum, "OSGB 1936") + self.assertIsNone(cs.__dict__["_semi_major_axis"]) + self.assertIsNone(cs.__dict__["_semi_minor_axis"]) + self.assertIsNone(cs.__dict__["_inverse_flattening"]) + self.assertEqual(cs.semi_major_axis, 6377563.396) + + def test_datum_no_change(self): + # Caches untouched + cs = GeogCS.from_datum("OSGB 1936") + initial_crs = cs.as_cartopy_crs() + cs.datum = "OSGB 1936" + new_crs = cs.as_cartopy_crs() + self.assertIs(new_crs, initial_crs) + + def test_inverse_flattening_change(self): + # Caches untouched + # Axes unchanged (this behaviour is odd, but matches existing behaviour) + # Warning about lack of effect on other aspects + cs = GeogCS(6543210, 6500000) + initial_crs = cs.as_cartopy_crs() + with self.assertWarnsRegex( + UserWarning, + "Setting inverse_flattening does not affect other properties of the GeogCS object.", + ): + cs.inverse_flattening = cs.inverse_flattening + 1 + new_crs = cs.as_cartopy_crs() + self.assertIs(new_crs, initial_crs) + self.assertEqual(cs.semi_major_axis, 6543210) + self.assertEqual(cs.semi_minor_axis, 6500000) + + class Test_RotatedGeogCS_construction(tests.IrisTest): def test_init(self): rcs = RotatedGeogCS( @@ -243,7 +372,10 @@ def test_init(self): class Test_RotatedGeogCS_repr(tests.IrisTest): def test_repr(self): rcs = RotatedGeogCS( - 30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229) + 30, + 40, + north_pole_grid_longitude=50, + ellipsoid=GeogCS(6371229), ) expected = ( "RotatedGeogCS(30.0, 40.0, " @@ -263,7 +395,10 @@ def test_repr(self): class Test_RotatedGeogCS_str(tests.IrisTest): def test_str(self): rcs = RotatedGeogCS( - 30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229) + 30, + 40, + north_pole_grid_longitude=50, + ellipsoid=GeogCS(6371229), ) expected = ( "RotatedGeogCS(30.0, 40.0, " @@ -373,85 +508,6 @@ def test_as_cartopy_projection(self): self.assertEqual(res, expected) -class Test_Stereographic_construction(tests.IrisTest): - def test_stereo(self): - st = stereo() - self.assertXMLElement(st, ("coord_systems", "Stereographic.xml")) - - -class Test_Stereographic_repr(tests.IrisTest): - def test_stereo(self): - st = stereo() - expected = ( - "Stereographic(central_lat=-90.0, central_lon=-45.0, " - "false_easting=100.0, false_northing=200.0, true_scale_lat=None, " - "ellipsoid=GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909))" - ) - self.assertEqual(expected, repr(st)) - - -class Test_Stereographic_as_cartopy_crs(tests.IrisTest): - def test_as_cartopy_crs(self): - latitude_of_projection_origin = -90.0 - longitude_of_projection_origin = -45.0 - false_easting = 100.0 - false_northing = 200.0 - ellipsoid = GeogCS(6377563.396, 6356256.909) - - st = Stereographic( - central_lat=latitude_of_projection_origin, - central_lon=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - expected = ccrs.Stereographic( - central_latitude=latitude_of_projection_origin, - central_longitude=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - globe=ccrs.Globe( - semimajor_axis=6377563.396, - semiminor_axis=6356256.909, - ellipse=None, - ), - ) - - res = st.as_cartopy_crs() - self.assertEqual(res, expected) - - -class Test_Stereographic_as_cartopy_projection(tests.IrisTest): - def test_as_cartopy_projection(self): - 
latitude_of_projection_origin = -90.0 - longitude_of_projection_origin = -45.0 - false_easting = 100.0 - false_northing = 200.0 - ellipsoid = GeogCS(6377563.396, 6356256.909) - - st = Stereographic( - central_lat=latitude_of_projection_origin, - central_lon=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - expected = ccrs.Stereographic( - central_latitude=latitude_of_projection_origin, - central_longitude=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - globe=ccrs.Globe( - semimajor_axis=6377563.396, - semiminor_axis=6356256.909, - ellipse=None, - ), - ) - - res = st.as_cartopy_projection() - self.assertEqual(res, expected) - - class Test_LambertConformal(tests.GraphicsTest): def test_fail_secant_latitudes_none(self): emsg = "secant latitudes" @@ -488,5 +544,23 @@ def test_south_cutoff(self): self.assertEqual(ccrs.cutoff, 30) +class Test_Datums(tests.IrisTest): + def test_default_none(self): + cs = GeogCS(6543210, 6500000) # Arbitrary radii + cartopy_crs = cs.as_cartopy_crs() + self.assertMultiLineEqual(cartopy_crs.datum.name, "unknown") + + def test_set_persist(self): + cs = GeogCS.from_datum(datum="WGS84") + cartopy_crs = cs.as_cartopy_crs() + self.assertMultiLineEqual( + cartopy_crs.datum.name, "World Geodetic System 1984" + ) + + cs = GeogCS.from_datum(datum="OSGB36") + cartopy_crs = cs.as_cartopy_crs() + self.assertMultiLineEqual(cartopy_crs.datum.name, "OSGB 1936") + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index 3b751cfcbe..216637202a 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -201,7 +201,7 @@ def test_bytesio(self): data = infile.read() # Compare files - self.assertEquals( + self.assertEqual( data, sio.getvalue(), "Mismatch in data when comparing iris bytesio save " diff --git a/lib/iris/tests/test_image_json.py b/lib/iris/tests/test_image_json.py index 7c5c824ffe..b5213156f8 100644 --- a/lib/iris/tests/test_image_json.py +++ b/lib/iris/tests/test_image_json.py @@ -8,56 +8,42 @@ # importing anything else import iris.tests as tests # isort:skip -import codecs -import itertools -import json -import os +from pathlib import Path -import requests +import iris.tests.graphics as graphics -@tests.skip_inet +@tests.skip_data class TestImageFile(tests.IrisTest): - def test_resolve(self): - listingfile_uri = ( - "https://raw.githubusercontent.com/SciTools/test-iris-imagehash" - "/gh-pages/v4_files_listing.txt" - ) - req = requests.get(listingfile_uri) - if req.status_code != 200: - raise ValueError( - "GET failed on image listings file: {}".format(listingfile_uri) - ) - - listings_text = req.content.decode("utf-8") - reference_image_filenames = [ - line.strip() for line in listings_text.split("\n") + def test_json(self): + # get test names from json + repo_names = [*graphics.read_repo_json().keys()] + # get file names from test data + test_data_names = [ + pp.stem for pp in Path(tests.get_data_path(["images"])).iterdir() ] - base = "https://scitools.github.io/test-iris-imagehash/images/v4" - reference_image_uris = set( - "{}/{}".format(base, name) for name in reference_image_filenames - ) - - imagerepo_json_filepath = os.path.join( - os.path.dirname(__file__), "results", "imagerepo.json" - ) - with open(imagerepo_json_filepath, "rb") as fi: - imagerepo = json.load(codecs.getreader("utf-8")(fi)) - - # "imagerepo" maps key: list-of-uris. 
Put all the uris in one big set.
-        tests_uris = set(itertools.chain.from_iterable(imagerepo.values()))
-
-        missing_refs = list(tests_uris - reference_image_uris)
-        n_missing_refs = len(missing_refs)
-        if n_missing_refs > 0:
+        # compare
+        repo_name_set = set(repo_names)
+        self.assertEqual(len(repo_names), len(repo_name_set))
+        test_data_name_set = set(test_data_names)
+        self.assertEqual(len(test_data_names), len(test_data_name_set))
+        missing_from_json = test_data_name_set - repo_name_set
+        if missing_from_json:
+            amsg = (
+                "Missing images: Images are present in the iris-test-data "
+                "repo that are not referenced in imagerepo.json"
+            )
+            # Always fails when we get here: report the problem.
+            self.assertEqual(missing_from_json, set(), msg=amsg)
+        missing_from_test_data = repo_name_set - test_data_name_set
+        if missing_from_test_data:
             amsg = (
-                "Missing images: These {} image uris are referenced in "
-                "imagerepo.json, but not listed in {} : "
+                "Missing images: Image names are referenced in "
+                "imagerepo.json that are not present in the iris-test-data "
+                "repo"
             )
-            amsg = amsg.format(n_missing_refs, listingfile_uri)
-            amsg += "".join("\n  {}".format(uri) for uri in missing_refs)
             # Always fails when we get here: report the problem.
-            self.assertEqual(n_missing_refs, 0, msg=amsg)
+            self.assertEqual(missing_from_test_data, set(), msg=amsg)
 
 
 if __name__ == "__main__":
diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py
new file mode 100644
index 0000000000..d1ebc9a36a
--- /dev/null
+++ b/lib/iris/tests/test_lazy_aggregate_by.py
@@ -0,0 +1,48 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+import unittest
+
+from iris._lazy_data import as_lazy_data
+from iris.tests import test_aggregate_by
+
+
+# Simply redo the tests of test_aggregate_by.py with lazy data
+class TestLazyAggregateBy(test_aggregate_by.TestAggregateBy):
+    def setUp(self):
+        super().setUp()
+
+        self.cube_single.data = as_lazy_data(self.cube_single.data)
+        self.cube_multi.data = as_lazy_data(self.cube_multi.data)
+        self.cube_single_masked.data = as_lazy_data(
+            self.cube_single_masked.data
+        )
+        self.cube_multi_masked.data = as_lazy_data(self.cube_multi_masked.data)
+        self.cube_easy.data = as_lazy_data(self.cube_easy.data)
+        self.cube_easy_weighted.data = as_lazy_data(
+            self.cube_easy_weighted.data
+        )
+
+        assert self.cube_single.has_lazy_data()
+        assert self.cube_multi.has_lazy_data()
+        assert self.cube_single_masked.has_lazy_data()
+        assert self.cube_multi_masked.has_lazy_data()
+        assert self.cube_easy.has_lazy_data()
+        assert self.cube_easy_weighted.has_lazy_data()
+
+    def tearDown(self):
+        super().tearDown()
+
+        # Note: weighted easy cube is not expected to have lazy data since
+        # WPERCENTILE is not lazy.
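+        # (Aside: as_lazy_data wraps a real array as a dask array, and
+        # has_lazy_data() stays True only until cube.data is realised --
+        # so the asserts below also confirm that aggregate_by kept the
+        # computation lazy.)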
+        assert self.cube_single.has_lazy_data()
+        assert self.cube_multi.has_lazy_data()
+        assert self.cube_single_masked.has_lazy_data()
+        assert self.cube_multi_masked.has_lazy_data()
+        assert self.cube_easy.has_lazy_data()
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py
index 86ff2f1ece..4749236abc 100644
--- a/lib/iris/tests/test_load.py
+++ b/lib/iris/tests/test_load.py
@@ -12,6 +12,9 @@
 import iris.tests as tests  # isort:skip
 
 import pathlib
+from unittest import mock
+
+import netCDF4
 
 import iris
 import iris.io
@@ -148,19 +151,20 @@ def test_path_object(self):
         self.assertEqual(len(cubes), 1)
 
 
-class TestOpenDAP(tests.IrisTest):
-    def test_load(self):
-        # Check that calling iris.load_* with a http URI triggers a call to
-        # ``iris.io.load_http``
+class TestOPeNDAP(tests.IrisTest):
+    def setUp(self):
+        self.url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15"
 
-        url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15"
+    def test_load_http_called(self):
+        # Check that calling iris.load_* with an http URI triggers a call to
+        # ``iris.io.load_http``
 
         class LoadHTTPCalled(Exception):
             pass
 
         def new_load_http(passed_urls, *args, **kwargs):
             self.assertEqual(len(passed_urls), 1)
-            self.assertEqual(url, passed_urls[0])
+            self.assertEqual(self.url, passed_urls[0])
             raise LoadHTTPCalled()
 
         try:
@@ -174,11 +178,29 @@ def new_load_http(passed_urls, *args, **kwargs):
                 iris.load_cubes,
             ]:
                 with self.assertRaises(LoadHTTPCalled):
-                    fn(url)
+                    fn(self.url)
 
         finally:
             iris.io.load_http = orig
 
+    @tests.skip_data
+    def test_netCDF_Dataset_call(self):
+        # Check that load_http calls netCDF4.Dataset and supplies the expected URL.
+
+        # To avoid making a request to an OPeNDAP server in a test, instead
+        # mock the call to netCDF4.Dataset so that it returns a dataset for a
+        # local file.
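+        # (mock.patch temporarily swaps in a mock for the named attribute --
+        # here netCDF4.Dataset -- returning the local dataset instead, and
+        # records each call so the requested URL can be checked afterwards
+        # with assert_called_with.)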
+ filename = tests.get_data_path( + ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") + ) + fake_dataset = netCDF4.Dataset(filename) + + with mock.patch( + "netCDF4.Dataset", return_value=fake_dataset + ) as dataset_loader: + next(iris.io.load_http([self.url], callback=None)) + dataset_loader.assert_called_with(self.url, mode="r") + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 2c22c6d088..5017698a22 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -16,7 +16,6 @@ import os.path import shutil import stat -from subprocess import check_call import tempfile from unittest import mock @@ -33,6 +32,7 @@ from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names import iris.tests.stock as stock +from iris.tests.stock.netcdf import ncgen_from_cdl import iris.util @@ -218,6 +218,26 @@ def test_load_merc_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_merc.cml")) + def test_load_complex_merc_grid(self): + # Test loading a single CF-netCDF file with a Mercator grid_mapping that + # includes false easting and northing and a standard parallel + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "mercator", "false_east_north_merc.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_merc_false.cml")) + + def test_load_merc_grid_non_unit_scale_factor(self): + # Test loading a single CF-netCDF file with a Mercator grid_mapping that + # includes a non-unit scale factor at projection origin + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "mercator", "non_unit_scale_factor_merc.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_merc_scale_factor.cml")) + def test_load_stereographic_grid(self): # Test loading a single CF-netCDF file with a stereographic # grid_mapping. @@ -228,6 +248,16 @@ def test_load_stereographic_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_stereo.cml")) + def test_load_polar_stereographic_grid(self): + # Test loading a single CF-netCDF file with a polar stereographic + # grid_mapping. + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "polar", "toa_brightness_temperature.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_polar.cml")) + def test_cell_methods(self): # Test exercising CF-netCDF cell method parsing. cubes = iris.load( @@ -333,12 +363,8 @@ def test_um_stash_source(self): self.tmpdir = tempfile.mkdtemp() cdl_path = os.path.join(self.tmpdir, "tst.cdl") nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + # Create a temporary netcdf file from the CDL string. + ncgen_from_cdl(ref_cdl, cdl_path, nc_path) # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) @@ -382,12 +408,8 @@ def test_ukmo__um_stash_source_priority(self): self.tmpdir = tempfile.mkdtemp() cdl_path = os.path.join(self.tmpdir, "tst.cdl") nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. 
- command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + # Create a temporary netcdf file from the CDL string. + ncgen_from_cdl(ref_cdl, cdl_path, nc_path) # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) @@ -427,12 +449,8 @@ def test_bad_um_stash_source(self): self.tmpdir = tempfile.mkdtemp() cdl_path = os.path.join(self.tmpdir, "tst.cdl") nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + # Create a temporary netcdf file from the CDL string. + ncgen_from_cdl(ref_cdl, cdl_path, nc_path) # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) diff --git a/lib/iris/tests/test_pandas.py b/lib/iris/tests/test_pandas.py index af62ad23d3..f47df75def 100644 --- a/lib/iris/tests/test_pandas.py +++ b/lib/iris/tests/test_pandas.py @@ -10,12 +10,16 @@ import copy import datetime -import unittest +from termios import IXOFF # noqa: F401 import cf_units import cftime import matplotlib.units import numpy as np +import pytest + +import iris +from iris._deprecation import IrisDeprecation # Importing pandas has the side-effect of messing with the formatters # used by matplotlib for handling dates. @@ -27,13 +31,14 @@ pandas = None matplotlib.units.registry = default_units_registry -skip_pandas = unittest.skipIf( - pandas is None, 'Test(s) require "pandas", ' "which is not available." +skip_pandas = pytest.mark.skipif( + pandas is None, + reason='Test(s) require "pandas", ' "which is not available.", ) if pandas is not None: - from iris.coords import DimCoord - from iris.cube import Cube + from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord + from iris.cube import Cube, CubeList import iris.pandas @@ -63,7 +68,7 @@ def test_masked(self): series = iris.pandas.as_series(cube) self.assertArrayEqual(series, cube.data.astype("f").filled(np.nan)) - def test_time_gregorian(self): + def test_time_standard(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") time_coord = DimCoord( [0, 100.1, 200.2, 300.3, 400.4], @@ -80,7 +85,7 @@ def test_time_gregorian(self): ] series = iris.pandas.as_series(cube) self.assertArrayEqual(series, cube.data) - self.assertListEqual(list(series.index), expected_index) + assert list(series.index) == expected_index def test_time_360(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") @@ -107,37 +112,37 @@ def test_copy_true(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") series = iris.pandas.as_series(cube) series[0] = 99 - self.assertEqual(cube.data[0], 0) + assert cube.data[0] == 0 def test_copy_int32_false(self): cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo") series = iris.pandas.as_series(cube, copy=False) series[0] = 99 - self.assertEqual(cube.data[0], 99) + assert cube.data[0] == 99 def test_copy_int64_false(self): cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo") series = iris.pandas.as_series(cube, copy=False) series[0] = 99 - self.assertEqual(cube.data[0], 99) + assert cube.data[0] == 99 def test_copy_float_false(self): cube = Cube(np.array([0, 1, 2, 3.3, 4]), long_name="foo") series = 
iris.pandas.as_series(cube, copy=False) series[0] = 99 - self.assertEqual(cube.data[0], 99) + assert cube.data[0] == 99 def test_copy_masked_true(self): data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) cube = Cube(data, long_name="foo") series = iris.pandas.as_series(cube) series[0] = 99 - self.assertEqual(cube.data[0], 0) + assert cube.data[0] == 0 def test_copy_masked_false(self): data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) cube = Cube(data, long_name="foo") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _ = iris.pandas.as_series(cube, copy=False) @@ -210,7 +215,7 @@ def test_masked(self): self.assertArrayEqual(data_frame.index, expected_index) self.assertArrayEqual(data_frame.columns, expected_columns) - def test_time_gregorian(self): + def test_time_standard(self): cube = Cube( np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts" ) @@ -230,8 +235,8 @@ def test_time_gregorian(self): ) for day_offset in day_offsets ] - self.assertTrue(all(data_frame.columns == timestamps)) - self.assertTrue(all(data_frame.index == [0, 1])) + assert all(data_frame.columns == timestamps) + assert all(data_frame.index == [0, 1]) def test_time_360(self): cube = Cube( @@ -261,7 +266,7 @@ def test_copy_true(self): ) data_frame = iris.pandas.as_data_frame(cube) data_frame[0][0] = 99 - self.assertEqual(cube.data[0, 0], 0) + assert cube.data[0, 0] == 0 def test_copy_int32_false(self): cube = Cube( @@ -270,7 +275,7 @@ def test_copy_int32_false(self): ) data_frame = iris.pandas.as_data_frame(cube, copy=False) data_frame[0][0] = 99 - self.assertEqual(cube.data[0, 0], 99) + assert cube.data[0, 0] == 99 def test_copy_int64_false(self): cube = Cube( @@ -279,7 +284,7 @@ def test_copy_int64_false(self): ) data_frame = iris.pandas.as_data_frame(cube, copy=False) data_frame[0][0] = 99 - self.assertEqual(cube.data[0, 0], 99) + assert cube.data[0, 0] == 99 def test_copy_float_false(self): cube = Cube( @@ -287,7 +292,7 @@ def test_copy_float_false(self): ) data_frame = iris.pandas.as_data_frame(cube, copy=False) data_frame[0][0] = 99 - self.assertEqual(cube.data[0, 0], 99) + assert cube.data[0, 0] == 99 def test_copy_masked_true(self): data = np.ma.MaskedArray( @@ -297,7 +302,7 @@ def test_copy_masked_true(self): cube = Cube(data, long_name="foo") data_frame = iris.pandas.as_data_frame(cube) data_frame[0][0] = 99 - self.assertEqual(cube.data[0, 0], 0) + assert cube.data[0, 0] == 0 def test_copy_masked_false(self): data = np.ma.MaskedArray( @@ -305,7 +310,7 @@ def test_copy_masked_false(self): mask=[[0, 1, 0, 1, 0], [1, 0, 1, 0, 1]], ) cube = Cube(data, long_name="foo") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _ = iris.pandas.as_data_frame(cube, copy=False) def test_copy_false_with_cube_view(self): @@ -313,10 +318,13 @@ def test_copy_false_with_cube_view(self): cube = Cube(data[:], long_name="foo") data_frame = iris.pandas.as_data_frame(cube, copy=False) data_frame[0][0] = 99 - self.assertEqual(cube.data[0, 0], 99) + assert cube.data[0, 0] == 99 @skip_pandas +@pytest.mark.filterwarnings( + "ignore:.*as_cube has been deprecated.*:iris._deprecation.IrisDeprecation" +) class TestSeriesAsCube(tests.IrisTest): def test_series_simple(self): series = pandas.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) @@ -348,7 +356,7 @@ def test_series_masked(self): tests.get_result_path(("pandas", "as_cube", "series_masked.cml")), ) - def test_series_datetime_gregorian(self): + def test_series_datetime_standard(self): series = pandas.Series( [0, 1, 
2, 3, 4], index=[ @@ -362,7 +370,7 @@ def test_series_datetime_gregorian(self): self.assertCML( iris.pandas.as_cube(series), tests.get_result_path( - ("pandas", "as_cube", "series_datetime_gregorian.cml") + ("pandas", "as_cube", "series_datetime_standard.cml") ), ) @@ -390,16 +398,19 @@ def test_copy_true(self): series = pandas.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) cube = iris.pandas.as_cube(series) cube.data[0] = 99 - self.assertEqual(series[5], 0) + assert series[5] == 0 def test_copy_false(self): series = pandas.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) cube = iris.pandas.as_cube(series, copy=False) cube.data[0] = 99 - self.assertEqual(series[5], 99) + assert series[5] == 99 @skip_pandas +@pytest.mark.filterwarnings( + "ignore:.*as_cube has been deprecated.*:iris._deprecation.IrisDeprecation" +) class TestDataFrameAsCube(tests.IrisTest): def test_data_frame_simple(self): data_frame = pandas.DataFrame( @@ -471,7 +482,7 @@ def test_data_frame_cftime_360(self): ), ) - def test_data_frame_datetime_gregorian(self): + def test_data_frame_datetime_standard(self): data_frame = pandas.DataFrame( [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], index=[ @@ -483,7 +494,7 @@ def test_data_frame_datetime_gregorian(self): self.assertCML( iris.pandas.as_cube(data_frame), tests.get_result_path( - ("pandas", "as_cube", "data_frame_datetime_gregorian.cml") + ("pandas", "as_cube", "data_frame_datetime_standard.cml") ), ) @@ -491,13 +502,461 @@ def test_copy_true(self): data_frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) cube = iris.pandas.as_cube(data_frame) cube.data[0, 0] = 99 - self.assertEqual(data_frame[0][0], 0) + assert data_frame[0][0] == 0 def test_copy_false(self): data_frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) cube = iris.pandas.as_cube(data_frame, copy=False) cube.data[0, 0] = 99 - self.assertEqual(data_frame[0][0], 99) + assert data_frame[0][0] == 99 + + +@skip_pandas +class TestFutureAndDeprecation(tests.IrisTest): + def test_deprecation_warning(self): + data_frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) + with pytest.warns( + IrisDeprecation, match="as_cube has been deprecated" + ): + _ = iris.pandas.as_cube(data_frame) + + # Tests for FUTURE are expected when as_dataframe() is made n-dimensional. 
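The TestPandasAsCubes class that follows exercises iris.pandas.as_cubes, the replacement for the deprecated as_cube. As a rough sketch of the conversion under test -- assuming an Iris release that ships as_cubes; the column and index names here are purely illustrative:

import numpy as np
import pandas as pd

import iris.pandas

# A 1-D DataFrame: the named index becomes a DimCoord, each data column
# becomes one cube, and a recognised name such as "air_temperature" is
# adopted as the cube's standard_name.
df = pd.DataFrame(
    {"air_temperature": np.arange(3) * 10.0},
    index=pd.Index([0.0, 10.0, 20.0], name="latitude"),
)
cubes = iris.pandas.as_cubes(df)
print(cubes[0].summary(shorten=True))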
+
+
+@skip_pandas
+class TestPandasAsCubes(tests.IrisTest):
+    @staticmethod
+    def _create_pandas(index_levels=0, is_series=False):
+        index_length = 3
+
+        index_names = [f"index_{i}" for i in range(index_levels)]
+        index_values = [
+            np.arange(index_length) * 10 * (i + 1) for i in range(index_levels)
+        ]
+
+        if index_levels == 1:
+            index = pandas.Index(index_values[0], name=index_names[0])
+            data_length = index_length
+        elif index_levels > 1:
+            index = pandas.MultiIndex.from_product(
+                index_values, names=index_names
+            )
+            data_length = index.nunique()
+        else:
+            index = None
+            data_length = index_length
+
+        data = np.arange(data_length) * 10
+
+        if is_series:
+            class_ = pandas.Series
+        else:
+            class_ = pandas.DataFrame
+
+        return class_(data, index=index)
+
+    def test_1d_no_index(self):
+        df = self._create_pandas()
+        result = iris.pandas.as_cubes(df)
+
+        expected_coord = DimCoord(df.index.values)
+        expected_cube = Cube(
+            data=df[0].values,
+            long_name=str(df[0].name),
+            dim_coords_and_dims=[(expected_coord, 0)],
+        )
+        assert result == [expected_cube]
+
+    def test_1d_with_index(self):
+        df = self._create_pandas(index_levels=1)
+        result = iris.pandas.as_cubes(df)
+
+        expected_coord = DimCoord(df.index.values, long_name=df.index.name)
+        (result_cube,) = result
+        assert result_cube.dim_coords == (expected_coord,)
+
+    def test_1d_series_no_index(self):
+        series = self._create_pandas(is_series=True)
+        result = iris.pandas.as_cubes(series)
+
+        expected_coord = DimCoord(series.index.values)
+        expected_cube = Cube(
+            data=series.values, dim_coords_and_dims=[(expected_coord, 0)]
+        )
+        assert result == [expected_cube]
+
+    def test_1d_series_with_index(self):
+        series = self._create_pandas(index_levels=1, is_series=True)
+        result = iris.pandas.as_cubes(series)
+
+        expected_coord = DimCoord(
+            series.index.values, long_name=series.index.name
+        )
+        (result_cube,) = result
+        assert result_cube.dim_coords == (expected_coord,)
+
+    def test_3d(self):
+        df = self._create_pandas(index_levels=3)
+        result = iris.pandas.as_cubes(df)
+
+        expected_coords = [
+            DimCoord(level.values, long_name=level.name)
+            for level in df.index.levels
+        ]
+        (result_cube,) = result
+        assert result_cube.dim_coords == tuple(expected_coords)
+
+    def test_3d_series(self):
+        series = self._create_pandas(index_levels=3, is_series=True)
+        result = iris.pandas.as_cubes(series)
+
+        expected_coords = [
+            DimCoord(level.values, long_name=level.name)
+            for level in series.index.levels
+        ]
+        (result_cube,) = result
+        assert result_cube.dim_coords == tuple(expected_coords)
+
+    def test_non_unique_index(self):
+        df = self._create_pandas(index_levels=1)
+        new_index = df.index.values
+        new_index[1] = new_index[0]
+        df = df.set_index(new_index)
+
+        with pytest.raises(ValueError, match="not unique per row"):
+            _ = iris.pandas.as_cubes(df)
+
+    def test_non_monotonic_index(self):
+        df = self._create_pandas(index_levels=1)
+        new_index = df.index.values
+        new_index[:2] = new_index[1::-1]
+        df = df.set_index(new_index)
+
+        with pytest.raises(ValueError, match="not monotonic"):
+            _ = iris.pandas.as_cubes(df)
+
+    def test_missing_rows(self):
+        df = self._create_pandas(index_levels=2)
+        df = df[:-1]
+
+        with pytest.raises(
+            ValueError, match="Not all index values have a corresponding row"
+        ):
+            _ = iris.pandas.as_cubes(df)
+
+    def test_aux_coord(self):
+        df = self._create_pandas()
+        coord_name = "foo"
+        df[coord_name] = df.index.values
+        result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name])
+
+        expected_aux_coord = AuxCoord(
+            df[coord_name].values,
+            long_name=coord_name
+        )
+        (result_cube,) = result
+        assert result_cube.aux_coords == (expected_aux_coord,)
+
+    def test_cell_measure(self):
+        df = self._create_pandas()
+        coord_name = "foo"
+        df[coord_name] = df.index.values
+        result = iris.pandas.as_cubes(df, cell_measure_cols=[coord_name])
+
+        expected_cm = CellMeasure(df[coord_name].values, long_name=coord_name)
+        (result_cube,) = result
+        assert result_cube.cell_measures() == [expected_cm]
+
+    def test_ancillary_variable(self):
+        df = self._create_pandas()
+        coord_name = "foo"
+        df[coord_name] = df.index.values
+        result = iris.pandas.as_cubes(df, ancillary_variable_cols=[coord_name])
+
+        expected_av = AncillaryVariable(
+            df[coord_name].values, long_name=coord_name
+        )
+        (result_cube,) = result
+        assert result_cube.ancillary_variables() == [expected_av]
+
+    def test_3d_with_2d_coord(self):
+        df = self._create_pandas(index_levels=3)
+        coord_shape = df.index.levshape[:2]
+        coord_values = np.arange(np.prod(coord_shape))
+        coord_name = "foo"
+        df[coord_name] = coord_values.repeat(df.index.levshape[-1])
+        result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name])
+
+        expected_points = coord_values.reshape(coord_shape)
+        (result_cube,) = result
+        result_coord = result_cube.coord(coord_name)
+        self.assertArrayEqual(result_coord.points, expected_points)
+        assert result_coord.cube_dims(result_cube) == (0, 1)
+
+    def test_coord_varies_all_indices(self):
+        df = self._create_pandas(index_levels=3)
+        coord_shape = df.index.levshape
+        coord_values = np.arange(np.prod(coord_shape))
+        coord_name = "foo"
+        df[coord_name] = coord_values
+        result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name])
+
+        expected_points = coord_values.reshape(coord_shape)
+        (result_cube,) = result
+        result_coord = result_cube.coord(coord_name)
+        self.assertArrayEqual(result_coord.points, expected_points)
+        assert result_coord.cube_dims(result_cube) == (0, 1, 2)
+
+    def test_category_coord(self):
+        # Something that varies on a dimension, but doesn't change with every
+        # increment.
+        df = self._create_pandas(index_levels=2)
+        coord_shape = df.index.levshape
+        coord_values = np.arange(np.prod(coord_shape))
+        coord_name = "foo"
+
+        # Create a repeating value along a dimension.
+        step = coord_shape[-1]
+        coord_values[1::step] = coord_values[::step]
+
+        df[coord_name] = coord_values
+        result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name])
+
+        expected_points = coord_values.reshape(coord_shape)
+        (result_cube,) = result
+        result_coord = result_cube.coord(coord_name)
+        self.assertArrayEqual(result_coord.points, expected_points)
+        assert result_coord.cube_dims(result_cube) == (0, 1)
+
+    def test_scalar_coord(self):
+        df = self._create_pandas()
+        coord_values = np.ones(len(df))
+        coord_name = "foo"
+        df[coord_name] = coord_values
+        result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name])
+
+        expected_points = np.unique(coord_values)
+        (result_cube,) = result
+        result_coord = result_cube.coord(coord_name)
+        self.assertArrayEqual(result_coord.points, expected_points)
+        assert result_coord.cube_dims(result_cube) == tuple()
+
+    def test_multi_phenom(self):
+        df = self._create_pandas()
+        new_name = "new_phenom"
+        df[new_name] = df[0]
+        result = iris.pandas.as_cubes(df)
+
+        # Note the shared coord object between both Cubes.
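+        # (as_cubes builds one cube per data column; both columns share the
+        # index, so a single DimCoord object serves both expected cubes.)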
+ expected_coord = DimCoord(df.index.values) + expected_cube_kwargs = dict(dim_coords_and_dims=[(expected_coord, 0)]) + + expected_cube_0 = Cube( + data=df[0].values, + long_name=str(df[0].name), + **expected_cube_kwargs, + ) + expected_cube_1 = Cube( + data=df[new_name].values, + long_name=new_name, + **expected_cube_kwargs, + ) + assert result == [expected_cube_0, expected_cube_1] + + def test_empty_series(self): + series = pandas.Series(dtype=object) + result = iris.pandas.as_cubes(series) + + assert result == CubeList() + + def test_empty_dataframe(self): + df = pandas.DataFrame() + result = iris.pandas.as_cubes(df) + + assert result == CubeList() + + def test_no_phenom(self): + df = self._create_pandas() + # Specify the only column as an AuxCoord. + result = iris.pandas.as_cubes(df, aux_coord_cols=[0]) + + assert result == CubeList() + + def test_standard_name_phenom(self): + # long_name behaviour is tested in test_1d_no_index. + df = self._create_pandas() + new_name = "air_temperature" + df = df.rename(columns={0: new_name}) + result = iris.pandas.as_cubes(df) + + (result_cube,) = result + assert result_cube.standard_name == new_name + + def test_standard_name_coord(self): + # long_name behaviour is tested in test_1d_with_index. + df = self._create_pandas() + new_name = "longitude" + df.index.names = [new_name] + result = iris.pandas.as_cubes(df) + + (result_cube,) = result + result_coord = result_cube.coord(dim_coords=True) + assert result_coord.standard_name == new_name + + def test_dtype_preserved_phenom(self): + df = self._create_pandas() + df = df.astype("int32") + result = iris.pandas.as_cubes(df) + + (result_cube,) = result + assert result_cube.dtype == np.int32 + + def test_preserve_dim_order(self): + new_order = ["index_1", "index_0", "index_2"] + + df = self._create_pandas(index_levels=3) + df = df.reset_index() + df = df.set_index(new_order) + df = df.sort_index() + result = iris.pandas.as_cubes(df) + + (result_cube,) = result + dim_order = [c.name() for c in result_cube.dim_coords] + assert dim_order == new_order + + def test_dtype_preserved_coord(self): + df = self._create_pandas() + new_index = df.index.astype("float64") + df.index = new_index + result = iris.pandas.as_cubes(df) + + (result_cube,) = result + result_coord = result_cube.coord(dim_coords=True) + assert result_coord.dtype == np.float64 + + def test_string_phenom(self): + # Strings can be uniquely troublesome. + df = self._create_pandas() + new_values = [str(v) for v in df[0]] + df[0] = new_values + result = iris.pandas.as_cubes(df) + + (result_cube,) = result + self.assertArrayEqual(result_cube.data, new_values) + + def test_string_coord(self): + # Strings can be uniquely troublesome. + # Must test using an AuxCoord since strings cannot be DimCoords. + df = self._create_pandas() + new_points = [str(v) for v in df.index.values] + coord_name = "foo" + df[coord_name] = new_points + result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name]) + + (result_cube,) = result + result_coord = result_cube.coord(coord_name) + self.assertArrayEqual(result_coord.points, new_points) + + def test_series_with_col_args(self): + series = self._create_pandas(is_series=True) + with pytest.warns(Warning, match="is a Series; ignoring"): + _ = iris.pandas.as_cubes(series, aux_coord_cols=["some_column"]) + + def test_phenom_view(self): + df = self._create_pandas() + result = iris.pandas.as_cubes(df, copy=False) + + # Modify AFTER creating the Cube(s). 
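+        # (With copy=False the cube data is a view onto the DataFrame's
+        # buffer, so this in-place change shows through in the cube --
+        # contrast test_phenom_copy below.)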
+ df[0][0] += 1 + + (result_cube,) = result + assert result_cube.data[0] == df[0][0] + + def test_phenom_copy(self): + df = self._create_pandas() + result = iris.pandas.as_cubes(df) + + # Modify AFTER creating the Cube(s). + df[0][0] += 1 + + (result_cube,) = result + assert result_cube.data[0] != df[0][0] + + def test_coord_never_view(self): + # Using AuxCoord - DimCoords and Pandas indices are immutable. + df = self._create_pandas() + coord_name = "foo" + df[coord_name] = df.index.values + result = iris.pandas.as_cubes( + df, copy=False, aux_coord_cols=[coord_name] + ) + + # Modify AFTER creating the Cube(s). + df[coord_name][0] += 1 + + (result_cube,) = result + result_coord = result_cube.coord(coord_name) + assert result_coord.points[0] != df[coord_name][0] + + def _test_dates_common(self, mode=None, alt_calendar=False): + df = self._create_pandas() + kwargs = dict(pandas_structure=df) + coord_name = "dates" + + if alt_calendar: + calendar = cf_units.CALENDAR_360_DAY + # Only pass this when non-default. + kwargs["calendars"] = {coord_name: calendar} + expected_points = [8640, 8641, 8642] + else: + calendar = cf_units.CALENDAR_STANDARD + expected_points = [8760, 8761, 8762] + expected_units = cf_units.Unit( + "hours since 1970-01-01 00:00:00", calendar=calendar + ) + + datetime_args = [(1971, 1, 1, i, 0, 0) for i in df.index.values] + if mode == "index": + values = [datetime.datetime(*a) for a in datetime_args] + df.index = pandas.Index(values, name=coord_name) + elif mode == "numpy": + values = [datetime.datetime(*a) for a in datetime_args] + df[coord_name] = values + kwargs["aux_coord_cols"] = [coord_name] + elif mode == "cftime": + values = [ + cftime.datetime(*a, calendar=calendar) for a in datetime_args + ] + df[coord_name] = values + kwargs["aux_coord_cols"] = [coord_name] + else: + raise ValueError("mode needs to be set") + + result = iris.pandas.as_cubes(**kwargs) + + (result_cube,) = result + result_coord = result_cube.coord(coord_name) + assert result_coord.units == expected_units + self.assertArrayEqual(result_coord.points, expected_points) + + def test_datetime_index(self): + self._test_dates_common(mode="index") + + def test_datetime_index_calendar(self): + self._test_dates_common(mode="index", alt_calendar=True) + + def test_numpy_datetime_coord(self): + # NumPy format is what happens if a Python datetime is assigned to a + # Pandas column. 
+ self._test_dates_common(mode="numpy") + + def test_numpy_datetime_coord_calendar(self): + self._test_dates_common(mode="numpy", alt_calendar=True) + + def test_cftime_coord(self): + self._test_dates_common(mode="cftime") + + def test_cftime_coord_calendar(self): + self._test_dates_common(mode="cftime", alt_calendar=True) if __name__ == "__main__": diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 2a08635ae0..77aea2b6b6 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -16,6 +16,7 @@ import numpy as np import iris +import iris.analysis import iris.coords as coords import iris.tests.stock @@ -324,6 +325,127 @@ def setUp(self): self.draw_method = qplt.scatter +@tests.skip_data +@tests.skip_plot +class Test2dPoints(tests.GraphicsTest): + def setUp(self): + super().setUp() + pp_file = tests.get_data_path(("PP", "globClim1", "u_wind.pp")) + self.cube = iris.load(pp_file)[0][0] + + def test_circular_changes(self): + # Circular + iplt.pcolormesh(self.cube, vmax=50) + iplt.points(self.cube, s=self.cube.data) + plt.gca().coastlines() + + self.check_graphic() + + +@tests.skip_data +@tests.skip_plot +class Test1dFillBetween(tests.GraphicsTest): + def setUp(self): + super().setUp() + self.cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "testing", "small_theta_colpex.nc") + ), + "air_potential_temperature", + )[0, 0] + self.draw_method = iplt.fill_between + + def test_coord_coord(self): + x = self.cube.coord("grid_latitude") + y1 = self.cube.coord("surface_altitude")[:, 0] + y2 = self.cube.coord("surface_altitude")[:, 1] + self.draw_method(x, y1, y2) + self.check_graphic() + + def test_coord_cube(self): + x = self.cube.coord("grid_latitude") + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) + self.draw_method(x, y1, y2) + self.check_graphic() + + def test_cube_coord(self): + x = self.cube.collapsed("grid_longitude", iris.analysis.MEAN) + y1 = self.cube.coord("surface_altitude")[:, 0] + y2 = y1 + 10 + self.draw_method(x, y1, y2) + self.check_graphic() + + def test_cube_cube(self): + x = self.cube.collapsed("grid_longitude", iris.analysis.MEAN) + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) + self.draw_method(x, y1, y2) + self.check_graphic() + + def test_incompatible_objects_x_odd(self): + # cubes/coordinates of different sizes cannot be plotted + x = self.cube.coord("grid_latitude")[:-1] + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) + with self.assertRaises(ValueError): + self.draw_method(x, y1, y2) + + def test_incompatible_objects_y1_odd(self): + # cubes/coordinates of different sizes cannot be plotted + x = self.cube.coord("grid_latitude") + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN)[:-1] + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) + with self.assertRaises(ValueError): + self.draw_method(x, y1, y2) + + def test_incompatible_objects_y2_odd(self): + # cubes/coordinates of different sizes cannot be plotted + x = self.cube.coord("grid_latitude") + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX)[:-1] + with self.assertRaises(ValueError): + self.draw_method(x, y1, y2) + + def test_incompatible_objects_all_odd(self): + # cubes/coordinates of different sizes cannot be plotted + x = 
self.cube.coord("grid_latitude") + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN)[:-1] + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX)[:-2] + with self.assertRaises(ValueError): + self.draw_method(x, y1, y2) + + def test_multidimensional(self): + # multidimensional cubes/coordinates are not allowed + x = self.cube.coord("grid_latitude") + y1 = self.cube + y2 = self.cube + with self.assertRaises(ValueError): + self.draw_method(x, y1, y2) + + def test_not_cube_or_coord(self): + # inputs must be cubes or coordinates + x = np.arange(self.cube.shape[0]) + y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) + y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) + with self.assertRaises(TypeError): + self.draw_method(x, y1, y2) + + +@tests.skip_data +@tests.skip_plot +class Test1dQuickplotFillBetween(Test1dFillBetween): + def setUp(self): + tests.GraphicsTest.setUp(self) + self.cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "testing", "small_theta_colpex.nc") + ), + "air_potential_temperature", + )[0, 0] + self.draw_method = qplt.fill_between + + @tests.skip_data @tests.skip_plot class TestAttributePositive(tests.GraphicsTest): diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index dec71a99ac..06f170c666 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -247,5 +247,39 @@ def test_not_reference_time_units(self): self.check_graphic() +@tests.skip_data +@tests.skip_plot +class TestSubplotColorbar(tests.IrisTest): + def setUp(self): + theta = _load_theta() + coords = ["model_level_number", "grid_longitude"] + self.data = next(theta.slices(coords)) + spec = (1, 1, 1) + self.figure1 = plt.figure() + self.axes1 = self.figure1.add_subplot(*spec) + self.figure2 = plt.figure() + self.axes2 = self.figure2.add_subplot(*spec) + + def _check(self, mappable, figure, axes): + self.assertIs(mappable.axes, axes) + self.assertIs(mappable.colorbar.mappable, mappable) + self.assertIs(mappable.colorbar.ax.get_figure(), figure) + + def test_with_axes1(self): + # plot using the first figure subplot axes (explicit) + mappable = qplt.contourf(self.data, axes=self.axes1) + self._check(mappable, self.figure1, self.axes1) + + def test_with_axes2(self): + # plot using the second figure subplot axes (explicit) + mappable = qplt.contourf(self.data, axes=self.axes2) + self._check(mappable, self.figure2, self.axes2) + + def test_without_axes__default(self): + # plot using the second/last figure subplot axes (default) + mappable = qplt.contourf(self.data) + self._check(mappable, self.figure2, self.axes2) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index ec7f8d1023..db182ae3f3 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -144,7 +144,7 @@ def test_invalid_clip_lengths(self): def test_default_values(self): # Get the default values specified in the function - argspec = inspect.getargspec(iris.util.clip_string) + argspec = inspect.getfullargspec(iris.util.clip_string) arg_dict = dict(zip(argspec.args[-2:], argspec.defaults)) result = iris.util.clip_string( diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index 9e3af90603..7bd8fdb597 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -343,8 +343,8 @@ def 
test_orig_coords(self): def test_magnitude_preservation(self): u, v = self._uv_cubes_limited_extent() ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - orig_sq_mag = u.data ** 2 + v.data ** 2 - res_sq_mag = ut.data ** 2 + vt.data ** 2 + orig_sq_mag = u.data**2 + v.data**2 + res_sq_mag = ut.data**2 + vt.data**2 self.assertArrayAllClose(orig_sq_mag, res_sq_mag, rtol=5e-4) def test_data_values(self): @@ -437,9 +437,9 @@ def test_rotated_to_osgb(self): self.assertArrayEqual(expected_mask, vt.data.mask) # Check unmasked values have sufficiently small error in mag. - expected_mag = np.sqrt(u.data ** 2 + v.data ** 2) + expected_mag = np.sqrt(u.data**2 + v.data**2) # Use underlying data to ignore mask in calculation. - res_mag = np.sqrt(ut.data.data ** 2 + vt.data.data ** 2) + res_mag = np.sqrt(ut.data.data**2 + vt.data.data**2) # Calculate percentage error (note there are no zero magnitudes # so we can divide safely). anom = 100.0 * np.abs(res_mag - expected_mag) / expected_mag @@ -493,5 +493,18 @@ def test_rotated_to_unrotated(self): self.assertArrayAlmostEqual(res_y, y2d) +class TestNonEarthPlanet(tests.IrisTest): + def test_non_earth_semimajor_axis(self): + u, v = uv_cubes() + u.coord("grid_latitude").coord_system = iris.coord_systems.GeogCS(123) + u.coord("grid_longitude").coord_system = iris.coord_systems.GeogCS(123) + v.coord("grid_latitude").coord_system = iris.coord_systems.GeogCS(123) + v.coord("grid_longitude").coord_system = iris.coord_systems.GeogCS(123) + other_cs = iris.coord_systems.RotatedGeogCS( + 0, 0, ellipsoid=iris.coord_systems.GeogCS(123) + ) + rotate_winds(u, v, other_cs) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py index 7d11c54660..311da8a0e6 100644 --- a/lib/iris/tests/unit/analysis/maths/__init__.py +++ b/lib/iris/tests/unit/analysis/maths/__init__.py @@ -10,11 +10,14 @@ import iris.tests as tests # isort:skip from abc import ABCMeta, abstractmethod +import operator +import dask.array as da import numpy as np from numpy import ma from iris.analysis import MEAN +from iris.analysis.maths import add from iris.coords import DimCoord from iris.cube import Cube import iris.tests.stock as stock @@ -36,8 +39,46 @@ def cube_func(self): # I.E. 'iris.analysis.maths.xx'. pass + def _base_testcube(self, include_derived=False): + if include_derived: + self.cube = stock.realistic_4d() + else: + self.cube = stock.realistic_4d_no_derived() + self.cube_xy_dimcoords = ["grid_latitude", "grid_longitude"] + return self.cube + + def _meshcube_collapsesafe(self, cube, coords): + # Return the cube, or if need be a modified copy, which can be safely + # collapsed over the given coords. + # This is needed for mesh-cubes, because the mesh coords have + # bounds which are not understood by the standard 'collapse' operation. + # TODO: possibly replace with a future 'safe mesh collapse' operation. + # cf. https://github.com/SciTools/iris/issues/4672 + result = cube + if cube.mesh is not None: + collapse_dims = set() + for co in coords: + # Each must produce a single coord, with a single dim + (dim,) = cube.coord_dims(co) + collapse_dims.add(dim) + i_meshdim = cube.mesh_dim() + if i_meshdim in collapse_dims: + # Make a copy with all mesh coords replaced by their AuxCoord + # equivalents. A simple slicing will do that. + slices = [slice(None)] * cube.ndim + slices[i_meshdim] = slice(0, None) + result = cube[tuple(slices)] + # Finally, **remove bounds** from all the former AuxCoords. 
+            # This is what enables them to be successfully collapsed.
+            for meshco in cube.coords(mesh_coords=True):
+                # Note: select new coord by name, as getting the AuxCoord
+                # which "matches" a MeshCoord is not possible.
+                result.coord(meshco.name()).bounds = None
+
+        return result
+
     def test_transposed(self):
-        cube = stock.realistic_4d_no_derived()
+        cube = self._base_testcube()
         other = cube.copy()
         other.transpose()
         res = self.cube_func(cube, other)
@@ -46,7 +87,7 @@ def test_transposed(self):
         self.assertArrayEqual(res.data, expected_data)
 
     def test_collapse_zeroth_dim(self):
-        cube = stock.realistic_4d_no_derived()
+        cube = self._base_testcube()
         other = cube.collapsed("time", MEAN)
         res = self.cube_func(cube, other)
         self.assertCML(res, checksum=False)
@@ -58,8 +99,10 @@ def test_collapse_zeroth_dim(self):
         self.assertMaskedArrayEqual(res.data, expected_data)
 
     def test_collapse_all_dims(self):
-        cube = stock.realistic_4d_no_derived()
-        other = cube.collapsed(cube.coords(dim_coords=True), MEAN)
+        cube = self._base_testcube()
+        collapse_coords = cube.coords(dim_coords=True)
+        other = self._meshcube_collapsesafe(cube, collapse_coords)
+        other = other.collapsed(collapse_coords, MEAN)
         res = self.cube_func(cube, other)
         self.assertCML(res, checksum=False)
         # No modification to other.data is needed as numpy broadcasting
@@ -70,21 +113,28 @@ def test_collapse_all_dims(self):
         self.assertArrayEqual(res.data, expected_data)
 
     def test_collapse_last_dims(self):
-        cube = stock.realistic_4d_no_derived()
-        other = cube.collapsed(["grid_latitude", "grid_longitude"], MEAN)
+        cube = self._base_testcube()
+        # Collapse: by 'last' we mean the X+Y ones...
+        other = self._meshcube_collapsesafe(cube, self.cube_xy_dimcoords)
+        other = other.collapsed(self.cube_xy_dimcoords, MEAN)
         res = self.cube_func(cube, other)
         self.assertCML(res, checksum=False)
         # Transpose the dimensions in self.cube that have been collapsed in
         # other to lie at the front, thereby enabling numpy broadcasting to
         # function when applying data operator. Finish by transposing back
         # again to restore order.
+        n_xydims = len(self.cube_xy_dimcoords)
+        cube_dims = tuple(np.arange(cube.ndim))
+        transpose_xy_back2front = cube_dims[-n_xydims:] + cube_dims[:-n_xydims]
+        transpose_xy_front2back = cube_dims[n_xydims:] + cube_dims[:n_xydims]
         expected_data = self.data_op(
-            cube.data.transpose((2, 3, 0, 1)), other.data
-        ).transpose(2, 3, 0, 1)
+            cube.data.transpose(transpose_xy_back2front), other.data
+        ).transpose(transpose_xy_front2back)
+        # Confirm result content is as expected
         self.assertMaskedArrayEqual(res.data, expected_data)
 
     def test_collapse_middle_dim(self):
-        cube = stock.realistic_4d_no_derived()
+        cube = self._base_testcube()
         other = cube.collapsed(["model_level_number"], MEAN)
         res = self.cube_func(cube, other)
         self.assertCML(res, checksum=False)
@@ -94,12 +144,26 @@ def test_collapse_middle_dim(self):
         self.assertMaskedArrayEqual(res.data, expected_data)
 
     def test_slice(self):
-        cube = stock.realistic_4d_no_derived()
+        cube = self._base_testcube()
         for dim in range(cube.ndim):
            keys = [slice(None)] * cube.ndim
            keys[dim] = 3
            other = cube[tuple(keys)]
+
+            # A special "cheat" for mesh cases...
+            # When a mesh dimension is indexed, this produces scalar versions
+            # of the mesh-coords, which don't match the originals.
+            # FOR NOW: remove those, for a result matching the other ones.
+            # TODO: coord equivalence may need reviewing, either for cube
+            # maths or for coord equivalence generally.
+            # cf. https://github.com/SciTools/iris/issues/4671
+            if cube.mesh and dim == cube.mesh_dim():
+                for co in cube.coords(mesh_coords=True):
+                    other.remove_coord(co.name())
+
             res = self.cube_func(cube, other)
+
+            # NOTE: only one testfile: any dim collapsed gives SAME result
             self.assertCML(res, checksum=False)
             # Add the collapsed dimension back in via np.newaxis to enable
             # numpy broadcasting to function.
@@ -111,6 +175,17 @@
         )
 
 
+class MathsAddOperationMixin:
+    # Test everything with the 'add' operation.
+    @property
+    def data_op(self):
+        return operator.add
+
+    @property
+    def cube_func(self):
+        return add
+
+
 class CubeArithmeticMaskingTestMixin(metaclass=ABCMeta):
     # A framework for testing the mask handling behaviour of the various cube
     # arithmetic operations. (A test for each operation inherits this).
@@ -127,18 +202,22 @@ def cube_func(self):
         # I.E. 'iris.analysis.maths.xx'.
         pass
 
-    def _test_partial_mask(self, in_place):
+    def _test_partial_mask(self, in_place, second_lazy=False):
         # Helper method for masked data tests.
         dat_a = ma.array([2.0, 2.0, 2.0, 2.0], mask=[1, 0, 1, 0])
         dat_b = ma.array([2.0, 2.0, 2.0, 2.0], mask=[1, 1, 0, 0])
+        if second_lazy:
+            cube_b = Cube(da.from_array(dat_b))
+        else:
+            cube_b = Cube(dat_b)
+
         cube_a = Cube(dat_a)
-        cube_b = Cube(dat_b)
-        com = self.data_op(dat_b, dat_a)
-        res = self.cube_func(cube_b, cube_a, in_place=in_place)
+        com = self.data_op(dat_a, dat_b)
+        res = self.cube_func(cube_a, cube_b, in_place=in_place)
 
-        return com, res, cube_b
+        return com, res, cube_a
 
     def test_partial_mask_in_place(self):
         # Cube in_place arithmetic operation.
@@ -147,13 +226,38 @@ def test_partial_mask_in_place(self):
         self.assertMaskedArrayEqual(com, res.data, strict=True)
         self.assertIs(res, orig_cube)
 
+    def test_partial_mask_second_lazy_in_place(self):
+        # Only second cube has lazy data.
+        com, res, orig_cube = self._test_partial_mask(True, second_lazy=True)
+        self.assertMaskedArrayEqual(com, res.data, strict=True)
+        self.assertIs(res, orig_cube)
+
     def test_partial_mask_not_in_place(self):
         # Cube arithmetic not an in_place operation.
         com, res, orig_cube = self._test_partial_mask(False)
-        self.assertMaskedArrayEqual(com, res.data)
+        self.assertMaskedArrayEqual(com, res.data, strict=True)
         self.assertIsNot(res, orig_cube)
 
+    def test_partial_mask_second_lazy_not_in_place(self):
+        # Only second cube has lazy data.
+        com, res, orig_cube = self._test_partial_mask(False, second_lazy=True)
+        self.assertMaskedArrayEqual(com, res.data, strict=True)
+        self.assertIsNot(res, orig_cube)
+
+    def test_in_place_introduces_mask(self):
+        # If second cube is masked, result should also be masked.
+        data1 = np.arange(4, dtype=np.float64)
+        data2 = ma.array([2.0, 2.0, 2.0, 2.0], mask=[1, 1, 0, 0])
+        cube1 = Cube(data1)
+        cube2 = Cube(data2)
+
+        com = self.data_op(data1, data2)
+        res = self.cube_func(cube1, cube2, in_place=True)
+
+        self.assertMaskedArrayEqual(com, res.data, strict=True)
+        self.assertIs(res, cube1)
+
 
 class CubeArithmeticCoordsTest(tests.IrisTest):
     # This class sets up pairs of cubes to test iris' ability to reject
diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py
new file mode 100644
index 0000000000..51f71affb0
--- /dev/null
+++ b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py
@@ -0,0 +1,40 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for cube arithmetic involving derived (i.e. factory) coords.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +from iris.tests.unit.analysis.maths import ( + CubeArithmeticBroadcastingTestMixin, + MathsAddOperationMixin, +) + + +@tests.skip_data +@tests.iristest_timing_decorator +class TestBroadcastingDerived( + tests.IrisTest_nometa, + MathsAddOperationMixin, + CubeArithmeticBroadcastingTestMixin, +): + """ + Repeat the broadcasting tests while retaining derived coordinates. + + NOTE: apart from showing that these operations do succeed, this mostly + produces a new set of CML result files, + in "lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords". + See there to confirm that the results preserve the derived coordinates. + + """ + + def _base_testcube(self): + return super()._base_testcube(include_derived=True) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py new file mode 100644 index 0000000000..1d81e7b480 --- /dev/null +++ b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py @@ -0,0 +1,186 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for cube arithmetic involving MeshCoords.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +import numpy as np + +from iris.analysis.maths import add +from iris.coords import AuxCoord, DimCoord +from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube +from iris.tests.unit.analysis.maths import ( + CubeArithmeticBroadcastingTestMixin, + CubeArithmeticCoordsTest, + MathsAddOperationMixin, +) + + +def _convert_to_meshcube(cube):  """Convert a cube based on stock.realistic_4d into a "meshcube".""" + # Replace lat+lon with a small mesh + cube = cube[..., -1] # remove final (X) dim + for name in ("grid_longitude", "grid_latitude"): + cube.remove_coord(name) + i_meshdim = len(cube.shape) - 1 + n_meshpoints = cube.shape[i_meshdim] + mesh = sample_mesh(n_nodes=n_meshpoints, n_faces=n_meshpoints, n_edges=0) + for co in mesh.to_MeshCoords(location="face"): + cube.add_aux_coord(co, i_meshdim) + # also add a dim-coord for the mesh dim, mainly so that + # the 'xxBroadcastingxx.test_collapse_all_dims' tests can do what they say. + mesh_dimcoord = DimCoord(np.arange(n_meshpoints), long_name="i_mesh_face") + cube.add_dim_coord(mesh_dimcoord, i_meshdim) + return cube + + +class MeshLocationsMixin: + # Control allowing us to also include tests with derived coordinates. + use_derived_coords = False + + # Modify the inherited data operation, to test with a mesh-cube. + # Also, optionally, test with derived coordinates. 
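(A brief orientation note, not part of the diff: the _convert_to_meshcube helper above swaps a structured lat+lon grid for a single unstructured mesh dimension carrying MeshCoords. A minimal sketch of the resulting structure, using only the stock helpers this file imports and the cube properties the tests themselves assert, so nothing here is new API:

    from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube

    mesh = sample_mesh()                # a small synthetic unstructured mesh
    cube = sample_mesh_cube(mesh=mesh)  # a test cube attached to that mesh
    assert cube.mesh is mesh            # the cube exposes its mesh...
    i_dim = cube.mesh_dim()             # ...and the index of the mesh dimension
    # The mesh supplies auxiliary MeshCoords, e.g. face latitude/longitude:
    for coord in cube.coords(mesh_coords=True):
        print(coord.name(), coord.shape)
)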
+ def _base_testcube(self): + cube = super()._base_testcube(include_derived=self.use_derived_coords) + cube = _convert_to_meshcube(cube) + self.cube_xy_dimcoords = ["i_mesh_face"] + self.cube = cube + return self.cube + + +@tests.skip_data +@tests.iristest_timing_decorator +class TestBroadcastingWithMesh( + tests.IrisTest_nometa, + MeshLocationsMixin, + MathsAddOperationMixin, + CubeArithmeticBroadcastingTestMixin, +): + """ + Run all the broadcasting tests on cubes with meshes. + + NOTE: there is a fair amount of special-case code to support this, built + into the CubeArithmeticBroadcastingTestMixin baseclass. + + """ + + +@tests.skip_data +@tests.iristest_timing_decorator +class TestBroadcastingWithMeshAndDerived( + tests.IrisTest_nometa, + MeshLocationsMixin, + MathsAddOperationMixin, + CubeArithmeticBroadcastingTestMixin, +): + """Run broadcasting tests with meshes *and* derived coords.""" + + use_derived_coords = True + + +class TestCoordMatchWithMesh(CubeArithmeticCoordsTest): + """Run the coordinate-mismatch tests with meshcubes.""" + + def _convert_to_meshcubes(self, cubes, i_dim): + """Add a mesh to one dim of the 'normal case' test-cubes.""" + for cube in cubes: + n_size = cube.shape[i_dim] + mesh = sample_mesh(n_nodes=n_size, n_faces=n_size, n_edges=0) + for co in mesh.to_MeshCoords("face"): + cube.add_aux_coord(co, i_dim) + assert cube.mesh is not None + + def _check_no_match(self, dim): + # Duplicate the basic operation, but convert cubes to meshcubes. + cube1, cube2 = self.SetUpNonMatching() + self._convert_to_meshcubes([cube1, cube2], dim) + with self.assertRaises(ValueError): + add(cube1, cube2) + + def test_no_match_dim0(self): + self._check_no_match(0) + + def test_no_match_dim1(self): + self._check_no_match(1) + + def _check_reversed_points(self, dim): + # Duplicate the basic operation, but convert cubes to meshcubes. + cube1, cube2 = self.SetUpReversed() + self._convert_to_meshcubes([cube1, cube2], dim) + with self.assertRaises(ValueError): + add(cube1, cube2) + + def test_reversed_points_dim0(self): + self._check_reversed_points(0) + + def test_reversed_points_dim1(self): + self._check_reversed_points(1) + + +class TestBasicMeshOperation(tests.IrisTest): + """Some very basic standalone tests, in an easier-to-comprehend form.""" + + def test_meshcube_same_mesh(self): + # Two similar cubes on a common mesh add to a third on the same mesh. + mesh = sample_mesh() + cube1 = sample_mesh_cube(mesh=mesh) + cube2 = sample_mesh_cube(mesh=mesh) + self.assertIs(cube1.mesh, mesh) + self.assertIs(cube2.mesh, mesh) + + result = cube1 + cube2 + self.assertEqual(result.shape, cube1.shape) + self.assertIs(result.mesh, mesh) + + def test_meshcube_different_equal_mesh(self): + # Two similar cubes on identical but different meshes. + cube1 = sample_mesh_cube() + cube2 = sample_mesh_cube() + self.assertEqual(cube1.mesh, cube2.mesh) + self.assertIsNot(cube1.mesh, cube2.mesh) + + result = cube1 + cube2 + self.assertEqual(result.shape, cube1.shape) + self.assertEqual(result.mesh, cube1.mesh) + self.assertTrue(result.mesh is cube1.mesh or result.mesh is cube2.mesh) + + def test_fail_meshcube_nonequal_mesh(self): + # Cubes on similar but different meshes -- should *not* combine. 
+ mesh1 = sample_mesh() + mesh2 = sample_mesh(n_edges=0) + self.assertNotEqual(mesh1, mesh2) + cube1 = sample_mesh_cube(mesh=mesh1) + cube2 = sample_mesh_cube(mesh=mesh2) + + msg = "Mesh coordinate.* does not match" + with self.assertRaisesRegex(ValueError, msg): + cube1 + cube2 + + def test_meshcube_meshcoord(self): + # Combining a meshcube and meshcoord. + cube = sample_mesh_cube() + cube.coord("latitude").units = "s" + cube.units = "m" + + # A separately derived, but matching 'latitude' MeshCoord. + coord = sample_mesh_cube().coord("latitude") + coord.units = "s" # N.B. the units **must also match** + + result = cube / coord + self.assertEqual(result.name(), "unknown") + self.assertEqual(result.units, "m s-1") + + # Moreover : *cannot* do this with the 'equivalent' AuxCoord + # cf. https://github.com/SciTools/iris/issues/4671 + coord = AuxCoord.from_coord(coord) + with self.assertRaises(ValueError): + cube / coord + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index f0dba83748..a018507fb3 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -33,7 +33,7 @@ def setUp(self): self.xs, self.ys = np.meshgrid(self.x.points, self.y.points) def transformation(x, y): - return x + y ** 2 + return x + y**2 # Construct a function which adds dimensions to the 2D data array # so that we can test higher dimensional functionality. diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py index f4c6623ad1..f0aa027baa 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py @@ -11,7 +11,7 @@ import iris.tests as tests # isort:skip import numpy as np -from scipy.sparse.csr import csr_matrix +from scipy.sparse import csr_matrix from iris.analysis._scipy_interpolate import _RegularGridInterpolator import iris.tests.stock as stock diff --git a/lib/iris/tests/unit/analysis/test_Aggregator.py b/lib/iris/tests/unit/analysis/test_Aggregator.py index 08180e61d0..ec837ea49a 100644 --- a/lib/iris/tests/unit/analysis/test_Aggregator.py +++ b/lib/iris/tests/unit/analysis/test_Aggregator.py @@ -156,6 +156,19 @@ def test_unmasked(self): self.assertArrayAlmostEqual(result, mock_return.copy()) mock_method.assert_called_once_with(data, axis=axis) + def test_allmasked_1D_with_mdtol(self): + data = ma.masked_all((3,)) + axis = 0 + mdtol = 0.5 + mock_return = ma.masked + with mock.patch.object( + self.TEST, "call_func", return_value=mock_return + ) as mock_method: + result = self.TEST.aggregate(data, axis, mdtol=mdtol) + + self.assertIs(result, mock_return) + mock_method.assert_called_once_with(data, axis=axis) + def test_returning_scalar_mdtol(self): # Test the case when the data aggregation function returns a scalar and # turns it into a masked array. diff --git a/lib/iris/tests/unit/analysis/test_MAX_RUN.py b/lib/iris/tests/unit/analysis/test_MAX_RUN.py new file mode 100755 index 0000000000..00de383f7a --- /dev/null +++ b/lib/iris/tests/unit/analysis/test_MAX_RUN.py @@ -0,0 +1,313 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :data:`iris.analysis.MAX_RUN` aggregator.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +import dask.array as da +import numpy as np +import numpy.ma as ma + +from iris._lazy_data import as_concrete_data, is_lazy_data +from iris.analysis import MAX_RUN + + +def bool_func(x): + return x == 1 + + +class UnmaskedTest(tests.IrisTest): + def setUp(self): + """ + Set up 1d and 2d unmasked data arrays for max run testing. + + Uses 1 and 3 rather than 1 and 0 to check that the condition function is being applied. + """ + + self.data_1ds = [ + (np.array([3, 1, 1, 3, 3, 3]), 2), # One run + (np.array([3, 1, 1, 3, 1, 3]), 2), # Two runs + (np.array([3, 3, 3, 3, 3, 3]), 0), # No run + (np.array([3, 3, 1, 3, 3, 3]), 1), # Max run of 1 + (np.array([1, 1, 1, 3, 1, 3]), 3), # Run to start + (np.array([3, 1, 3, 1, 1, 1]), 3), # Run to end + (np.array([1, 1, 1, 1, 1, 1]), 6), # All run + ] + + self.data_2d_axis0 = np.array( + [ + [3, 1, 1, 3, 3, 3], # One run + [3, 1, 1, 3, 1, 3], # Two runs + [3, 3, 3, 3, 3, 3], # No run + [3, 3, 1, 3, 3, 3], # Max run of 1 + [1, 1, 1, 3, 1, 3], # Run to start + [3, 1, 3, 1, 1, 1], # Run to end + [1, 1, 1, 1, 1, 1], # All run + ] + ).T + self.expected_2d_axis0 = np.array([2, 2, 0, 1, 3, 3, 6]) + + self.data_2d_axis1 = self.data_2d_axis0.T + self.expected_2d_axis1 = self.expected_2d_axis0 + + +class MaskedTest(tests.IrisTest): + def setUp(self): + """ + Set up 1d and 2d masked data arrays for max run testing. + + Uses 1 and 3 rather than 1 and 0 to check that the condition function is being applied. + """ + + self.data_1ds = [ + ( + ma.masked_array( + np.array([1, 1, 1, 3, 1, 3]), np.array([0, 0, 0, 0, 0, 0]) + ), + 3, + ), # No mask + ( + ma.masked_array( + np.array([1, 1, 1, 3, 1, 3]), np.array([0, 0, 0, 0, 1, 1]) + ), + 3, + ), # Mask misses run + ( + ma.masked_array( + np.array([1, 1, 1, 3, 1, 3]), np.array([1, 1, 1, 0, 0, 0]) + ), + 1, + ), # Mask max run + ( + ma.masked_array( + np.array([1, 1, 1, 3, 1, 3]), np.array([0, 0, 1, 0, 0, 0]) + ), + 2, + ), # Partially mask run + ( + ma.masked_array( + np.array([3, 1, 1, 1, 1, 3]), np.array([0, 0, 1, 0, 0, 0]) + ), + 2, + ), # Mask interrupts run + ( + ma.masked_array( + np.array([1, 1, 1, 3, 1, 3]), np.array([1, 1, 1, 1, 1, 1]) + ), + 0, + ), # All mask + ( + ma.masked_array( + np.array([1, 1, 1, 3, 1, 3]), np.array([1, 1, 1, 1, 0, 1]) + ), + 1, + ), # All mask or run + ] + + self.data_2d_axis0 = ma.masked_array( + np.array( + [ + [1, 1, 1, 3, 1, 3], + [1, 1, 1, 3, 1, 3], + [1, 1, 1, 3, 1, 3], + [1, 1, 1, 3, 1, 3], + [1, 1, 1, 3, 1, 3], + [1, 1, 1, 3, 1, 3], + ] + ), + np.array( + [ + [0, 0, 0, 0, 0, 0], # No mask + [0, 0, 0, 0, 1, 1], # Mask misses run + [1, 1, 1, 0, 0, 0], # Mask max run + [0, 0, 1, 0, 0, 0], # Partially mask run + [1, 1, 1, 1, 1, 1], # All mask + [1, 1, 1, 1, 0, 1], # All mask or run + ] + ), + ).T + + self.expected_2d_axis0 = np.array([3, 3, 1, 2, 0, 1]) + + self.data_2d_axis1 = self.data_2d_axis0.T + self.expected_2d_axis1 = self.expected_2d_axis0 + + +class RealMixin: + def run_func(self, *args, **kwargs): + return MAX_RUN.call_func(*args, **kwargs) + + def check_array(self, result, expected): + self.assertArrayEqual(result, expected) + + +class LazyMixin: + def run_func(self, *args, **kwargs): + return MAX_RUN.lazy_func(*args, **kwargs) + + def check_array(self, result, expected, expected_chunks): + 
self.assertTrue(is_lazy_data(result)) + self.assertTupleEqual(result.chunks, expected_chunks) + result = as_concrete_data(result) + self.assertArrayEqual(result, expected) + + +class TestBasic(UnmaskedTest, RealMixin): + def test_1d(self): + for data, expected in self.data_1ds: + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + self.check_array(result, expected) + + def test_2d_axis0(self): + result = self.run_func( + self.data_2d_axis0, + axis=0, + function=bool_func, + ) + self.check_array(result, self.expected_2d_axis0) + + def test_2d_axis1(self): + result = self.run_func( + self.data_2d_axis1, + axis=1, + function=bool_func, + ) + self.check_array(result, self.expected_2d_axis1) + + +class TestLazy(UnmaskedTest, LazyMixin): + def test_1d(self): + for data, expected in self.data_1ds: + data = da.from_array(data) + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + self.check_array(result, expected, ()) + + def test_2d_axis0(self): + data = da.from_array(self.data_2d_axis0) + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + self.check_array( + result, self.expected_2d_axis0, ((len(self.expected_2d_axis0),),) + ) + + def test_2d_axis1(self): + data = da.from_array(self.data_2d_axis1) + result = self.run_func( + data, + axis=1, + function=bool_func, + ) + self.check_array( + result, self.expected_2d_axis1, ((len(self.expected_2d_axis1),),) + ) + + +class TestLazyChunked(UnmaskedTest, LazyMixin): + def test_1d(self): + for data, expected in self.data_1ds: + data = da.from_array(data, chunks=(1,)) + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + self.check_array(result, expected, ()) + + def test_2d_axis0_chunk0(self): + data = da.from_array(self.data_2d_axis0, chunks=(1, -1)) + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + self.check_array( + result, self.expected_2d_axis0, ((len(self.expected_2d_axis0),),) + ) + + def test_2d_axis0_chunk1(self): + data = da.from_array(self.data_2d_axis0, chunks=(-1, 1)) + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + expected_chunks = (tuple([1] * len(self.expected_2d_axis0)),) + self.check_array(result, self.expected_2d_axis0, expected_chunks) + + def test_2d_axis1_chunk0(self): + data = da.from_array(self.data_2d_axis1, chunks=(1, -1)) + result = self.run_func( + data, + axis=1, + function=bool_func, + ) + expected_chunks = (tuple([1] * len(self.expected_2d_axis1)),) + self.check_array(result, self.expected_2d_axis1, expected_chunks) + + def test_2d_axis1_chunk1(self): + data = da.from_array(self.data_2d_axis1, chunks=(-1, 1)) + result = self.run_func( + data, + axis=1, + function=bool_func, + ) + self.check_array( + result, self.expected_2d_axis1, ((len(self.expected_2d_axis1),),) + ) + + +class TestMasked(MaskedTest, RealMixin): + def test_1d(self): + for data, expected in self.data_1ds: + result = self.run_func( + data, + axis=0, + function=bool_func, + ) + self.check_array(result, expected) + + def test_2d_axis0(self): + result = self.run_func( + self.data_2d_axis0, + axis=0, + function=bool_func, + ) + self.check_array(result, self.expected_2d_axis0) + + def test_2d_axis1(self): + result = self.run_func( + self.data_2d_axis1, + axis=1, + function=bool_func, + ) + self.check_array(result, self.expected_2d_axis1) + + +class Test_name(tests.IrisTest): + def test(self): + self.assertEqual(MAX_RUN.name(), "max_run") + + +class Test_cell_method(tests.IrisTest): + def test(self): + self.assertIsNone(MAX_RUN.cell_method) + + 
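(A brief orientation note, not part of the diff: the expected values in the tests above follow from a simple definition. MAX_RUN reports the length of the longest consecutive run, along the aggregation axis, of points satisfying the supplied condition; the masked cases show that masked points do not count toward runs and also break them. A minimal 1-D sketch of the unmasked definition, as an illustration only and not Iris's implementation, which additionally handles masks, multiple dimensions and lazy chunked data:

    import numpy as np

    def max_run_1d(data, function):
        # Longest consecutive run of elements where function(element) is True.
        hits = function(np.asarray(data))
        best = run = 0
        for hit in hits:
            run = run + 1 if hit else 0
            best = max(best, run)
        return best

    # These reproduce expectations from the unmasked cases above:
    assert max_run_1d(np.array([3, 1, 1, 3, 1, 3]), lambda x: x == 1) == 2
    assert max_run_1d(np.array([3, 3, 3, 3, 3, 3]), lambda x: x == 1) == 0
    assert max_run_1d(np.array([1, 1, 1, 1, 1, 1]), lambda x: x == 1) == 6
)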
+if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index 52648f6fb8..bfd3234d26 100644 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -9,92 +9,393 @@ # importing anything else. import iris.tests as tests # isort:skip +from unittest import mock + +import dask.array as da import numpy as np import numpy.ma as ma +from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data from iris.analysis import PERCENTILE -class Test_aggregate(tests.IrisTest): - def test_missing_mandatory_kwarg(self): - emsg = "percentile aggregator requires .* keyword argument 'percent'" - with self.assertRaisesRegex(ValueError, emsg): - PERCENTILE.aggregate("dummy", axis=0) +class AggregateMixin: + """ + Percentile aggregation tests common to the numpy ('fast') and scipy + methods, and to both real and lazy aggregation. + + """ + + def check_percentile_calc( + self, data, axis, percent, expected, approx=False, **kwargs + ): + if self.lazy: + data = as_lazy_data(data) + + expected = ma.array(expected) + + actual = self.agg_method( + data, + axis=axis, + percent=percent, + fast_percentile_method=self.fast, + **kwargs, + ) + + self.assertTupleEqual(actual.shape, expected.shape) + is_lazy = is_lazy_data(actual) + + if self.lazy: + self.assertTrue(is_lazy) + actual = as_concrete_data(actual) + else: + self.assertFalse(is_lazy) + + if approx: + self.assertMaskedArrayAlmostEqual(actual, expected) + else: + self.assertMaskedArrayEqual(actual, expected) def test_1d_single(self): data = np.arange(11) - actual = PERCENTILE.aggregate(data, axis=0, percent=50) + axis = 0 + percent = 50 expected = 5 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) - - def test_masked_1d_single(self): - data = ma.arange(11) - data[3:7] = ma.masked - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - expected = 7 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) + self.check_percentile_calc(data, axis, percent, expected) def test_1d_multi(self): data = np.arange(11) percent = np.array([20, 50, 90]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) + axis = 0 expected = [2, 5, 9] - self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayEqual(actual, expected) + self.check_percentile_calc(data, axis, percent, expected) + + def test_2d_single(self): + shape = (2, 11) + data = np.arange(np.prod(shape)).reshape(shape) + axis = 0 + percent = 50 + expected = np.arange(shape[-1]) + 5.5 + self.check_percentile_calc(data, axis, percent, expected) + + def test_2d_multi(self): + shape = (2, 10) + data = np.arange(np.prod(shape)).reshape(shape) + axis = 0 + percent = np.array([10, 50, 90, 100]) + expected = np.tile(np.arange(shape[-1]), percent.size) + expected = expected.reshape(percent.size, shape[-1]).T + 1 + expected = expected + (percent / 10 - 1) + self.check_percentile_calc(data, axis, percent, expected, approx=True) + + +class ScipyAggregateMixin: + """ + Tests for calculations specific to the default (scipy) function. Includes + tests on masked data and tests to verify that the function is called with + the expected keywords. Needs to be used with AggregateMixin, as some of + these tests re-use its check_percentile_calc method. 
+ + """ + + def test_masked_1d_single(self): + data = ma.arange(11) + data[3:7] = ma.masked + axis = 0 + percent = 50 + expected = 7 + self.check_percentile_calc(data, axis, percent, expected) def test_masked_1d_multi(self): data = ma.arange(11) data[3:9] = ma.masked percent = np.array([25, 50, 75]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) + axis = 0 expected = [1, 2, 9] - self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayEqual(actual, expected) - - def test_2d_single(self): - shape = (2, 11) - data = np.arange(np.prod(shape)).reshape(shape) - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - self.assertTupleEqual(actual.shape, shape[-1:]) - expected = np.arange(shape[-1]) + 5.5 - self.assertArrayEqual(actual, expected) + self.check_percentile_calc(data, axis, percent, expected) def test_masked_2d_single(self): shape = (2, 11) data = ma.arange(np.prod(shape)).reshape(shape) data[0, ::2] = ma.masked data[1, 1::2] = ma.masked - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - self.assertTupleEqual(actual.shape, shape[-1:]) + axis = 0 + percent = 50 + # data has only one value for each column being aggregated, so result + # should be that value. expected = np.empty(shape[-1:]) expected[1::2] = data[0, 1::2] expected[::2] = data[1, ::2] - self.assertArrayEqual(actual, expected) - - def test_2d_multi(self): - shape = (2, 10) - data = np.arange(np.prod(shape)).reshape(shape) - percent = np.array([10, 50, 90, 100]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size) - expected = expected.reshape(percent.size, shape[-1]).T + 1 - expected = expected + (percent / 10 - 1) - self.assertArrayAlmostEqual(actual, expected) + self.check_percentile_calc(data, axis, percent, expected) def test_masked_2d_multi(self): shape = (3, 10) data = ma.arange(np.prod(shape)).reshape(shape) - data[1] = ma.masked + data[1, ::2] = ma.masked percent = np.array([10, 50, 70, 80]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size) - expected = expected.reshape(percent.size, shape[-1]).T - expected = expected + (percent / 10 * 2) - self.assertArrayAlmostEqual(actual, expected) + axis = 0 + mdtol = 0.1 + + # First column is just 0 and 20. Percentiles of these can be calculated as + # linear interpolation. + expected = percent / 100 * 20 + # Other columns are first column plus column number. + expected = ma.array( + np.broadcast_to(expected, (shape[-1], percent.size)) + + np.arange(shape[-1])[:, np.newaxis] + ) + expected[::2] = ma.masked + + self.check_percentile_calc( + data, axis, percent, expected, mdtol=mdtol, approx=True + ) + + @mock.patch("scipy.stats.mstats.mquantiles", return_value=[2, 4]) + def test_default_kwargs_passed(self, mocked_mquantiles): + data = np.arange(5) + percent = [42, 75] + axis = 0 + if self.lazy: + data = as_lazy_data(data) + + self.agg_method(data, axis=axis, percent=percent) + + # Trigger calculation for lazy case. 
+ as_concrete_data(result) + for key in ["alphap", "betap"]: + self.assertEqual(mocked_mquantiles.call_args.kwargs[key], 1) + + @mock.patch("scipy.stats.mstats.mquantiles") + def test_chosen_kwargs_passed(self, mocked_mquantiles): + data = np.arange(5) + percent = [42, 75] + axis = 0 + if self.lazy: + data = as_lazy_data(data) + + result = self.agg_method( + data, axis=axis, percent=percent, alphap=0.6, betap=0.5 + ) + + # Trigger calculation for lazy case. + as_concrete_data(result) + for key, val in zip(["alphap", "betap"], [0.6, 0.5]): + self.assertEqual(mocked_mquantiles.call_args.kwargs[key], val) + + +class Test_aggregate(tests.IrisTest, AggregateMixin, ScipyAggregateMixin): + """Tests for standard aggregation method on real data.""" + + def setUp(self): + self.fast = False + self.lazy = False + self.agg_method = PERCENTILE.aggregate + + def test_missing_mandatory_kwarg(self): + emsg = "percentile aggregator requires .* keyword argument 'percent'" + with self.assertRaisesRegex(ValueError, emsg): + PERCENTILE.aggregate("dummy", axis=0) + + def test_wrong_kwarg(self): + # Test we get an error out of scipy if we pass the numpy keyword. + data = range(5) + emsg = "unexpected keyword argument" + with self.assertRaisesRegex(TypeError, emsg): + PERCENTILE.aggregate(data, percent=50, axis=0, method="nearest") + + +class Test_fast_aggregate(tests.IrisTest, AggregateMixin): + """Tests for fast percentile method on real data.""" + + def setUp(self): + self.fast = True + self.lazy = False + self.agg_method = PERCENTILE.aggregate + + def test_masked(self): + # Using (3,11) because np.percentile returns a masked array anyway with + # (2, 11) + shape = (3, 11) + data = ma.arange(np.prod(shape)).reshape(shape) + data[0, ::2] = ma.masked + emsg = ( + "Cannot use fast np.percentile method with masked array unless " + "mdtol is 0." + ) + with self.assertRaisesRegex(TypeError, emsg): + PERCENTILE.aggregate( + data, axis=0, percent=50, fast_percentile_method=True + ) + + def test_masked_mdtol_0(self): + # Using (3,11) because np.percentile returns a masked array anyway with + # (2, 11) + shape = (3, 11) + axis = 0 + percent = 50 + data = ma.arange(np.prod(shape)).reshape(shape) + data[0, ::2] = ma.masked + expected = ma.arange(shape[-1]) + 11 + expected[::2] = ma.masked + self.check_percentile_calc(data, axis, percent, expected, mdtol=0) + + @mock.patch("numpy.percentile") + def test_numpy_percentile_called(self, mocked_percentile): + # Basic check that numpy.percentile is called. + data = np.arange(5) + self.agg_method(data, axis=0, percent=42, fast_percentile_method=True) + mocked_percentile.assert_called_once() + + # Check that we left "method" keyword to numpy's default. + self.assertNotIn("method", mocked_percentile.call_args.kwargs) + + @mock.patch("numpy.percentile") + def test_chosen_kwarg_passed(self, mocked_percentile): + data = np.arange(5) + percent = [42, 75] + axis = 0 + + self.agg_method( + data, + axis=axis, + percent=percent, + fast_percentile_method=True, + method="nearest", + ) + self.assertEqual( + mocked_percentile.call_args.kwargs["method"], "nearest" + ) + + +class MultiAxisMixin: + """ + Tests for axis passed as a tuple. Only relevant for lazy aggregation since + axis is always specified as int for real aggregation. 
+ + """ + + def test_multi_axis(self): + data = np.arange(24).reshape((2, 3, 4)) + collapse_axes = (0, 2) + lazy_data = as_lazy_data(data) + percent = 30 + actual = PERCENTILE.lazy_aggregate( + lazy_data, + axis=collapse_axes, + percent=percent, + fast_percentile_method=self.fast, + ) + self.assertTrue(is_lazy_data(actual)) + result = as_concrete_data(actual) + self.assertTupleEqual(result.shape, (3,)) + for num, sub_result in enumerate(result): + # results should be the same as percentiles calculated from slices. + self.assertArrayAlmostEqual( + sub_result, np.percentile(data[:, num, :], percent) + ) + + def test_multi_axis_multi_percent(self): + data = np.arange(24).reshape((2, 3, 4)) + collapse_axes = (0, 2) + lazy_data = as_lazy_data(data) + percent = [20, 30, 50, 70, 80] + actual = PERCENTILE.lazy_aggregate( + lazy_data, + axis=collapse_axes, + percent=percent, + fast_percentile_method=self.fast, + ) + self.assertTrue(is_lazy_data(actual)) + result = as_concrete_data(actual) + self.assertTupleEqual(result.shape, (3, 5)) + for num, sub_result in enumerate(result): + # results should be the same as percentiles calculated from slices. + self.assertArrayAlmostEqual( + sub_result, np.percentile(data[:, num, :], percent) + ) + + +class Test_lazy_fast_aggregate(tests.IrisTest, AggregateMixin, MultiAxisMixin): + """Tests for fast aggregation on lazy data.""" + + def setUp(self): + self.fast = True + self.lazy = True + self.agg_method = PERCENTILE.lazy_aggregate + + def test_masked(self): + shape = (2, 11) + data = ma.arange(np.prod(shape)).reshape(shape) + data[0, ::2] = ma.masked + data = as_lazy_data(data) + actual = PERCENTILE.lazy_aggregate( + data, axis=0, percent=50, fast_percentile_method=True + ) + emsg = ( + "Cannot use fast np.percentile method with masked array unless " + "mdtol is 0." + ) + with self.assertRaisesRegex(TypeError, emsg): + as_concrete_data(actual) + + def test_masked_mdtol_0(self): + # Using (3,11) because np.percentile returns a masked array anyway with + # (2, 11) + shape = (3, 11) + axis = 0 + percent = 50 + data = ma.arange(np.prod(shape)).reshape(shape) + data[0, ::2] = ma.masked + data = as_lazy_data(data) + expected = ma.arange(shape[-1]) + 11 + expected[::2] = ma.masked + self.check_percentile_calc(data, axis, percent, expected, mdtol=0) + + @mock.patch("numpy.percentile", return_value=np.array([2, 4])) + def test_numpy_percentile_called(self, mocked_percentile): + # Basic check that numpy.percentile is called. + data = da.arange(5) + result = self.agg_method( + data, axis=0, percent=[42, 75], fast_percentile_method=True + ) + + self.assertTrue(is_lazy_data(result)) + as_concrete_data(result) + mocked_percentile.assert_called() + + # Check we have left "method" keyword to numpy's default. 
+ self.assertNotIn("method", mocked_percentile.call_args.kwargs) + + @mock.patch("numpy.percentile") + def test_chosen_method_kwarg_passed(self, mocked_percentile): + data = da.arange(5) + percent = [42, 75] + axis = 0 + + result = self.agg_method( + data, + axis=axis, + percent=percent, + fast_percentile_method=True, + method="nearest", + ) + + self.assertTrue(is_lazy_data(result)) + as_concrete_data(result) + self.assertEqual( + mocked_percentile.call_args.kwargs["method"], "nearest" + ) + + +class Test_lazy_aggregate( + tests.IrisTest, AggregateMixin, ScipyAggregateMixin, MultiAxisMixin +): + """Tests for standard aggregation on lazy data.""" + + def setUp(self): + self.fast = False + self.lazy = True + self.agg_method = PERCENTILE.lazy_aggregate class Test_name(tests.IrisTest): diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py index a8e6ed28ed..f11cd7a8d3 100644 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py @@ -14,9 +14,11 @@ from unittest import mock +import dask.array as da import numpy as np -from iris.analysis import PercentileAggregator, _percentile +from iris._lazy_data import as_concrete_data +from iris.analysis import PercentileAggregator from iris.coords import AuxCoord, DimCoord from iris.cube import Cube @@ -24,16 +26,10 @@ class Test(tests.IrisTest): def test_init(self): name = "percentile" - call_func = _percentile units_func = mock.sentinel.units_func - lazy_func = mock.sentinel.lazy_func - aggregator = PercentileAggregator( - units_func=units_func, lazy_func=lazy_func - ) + aggregator = PercentileAggregator(units_func=units_func) self.assertEqual(aggregator.name(), name) - self.assertIs(aggregator.call_func, call_func) self.assertIs(aggregator.units_func, units_func) - self.assertIs(aggregator.lazy_func, lazy_func) self.assertIsNone(aggregator.cell_method) @@ -85,7 +81,7 @@ def test_simple_multiple_points(self): self.cube_simple, data, coords, **kwargs ) self.assertEqual(actual.shape, percent.shape + self.cube_simple.shape) - expected = np.rollaxis(data, -1) + expected = data.T self.assertArrayEqual(actual.data, expected) name = "percentile_over_time" coord = actual.coord(name) @@ -119,13 +115,29 @@ def test_multi_multiple_points(self): self.cube_multi, data, coords, **kwargs ) self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) - expected = np.rollaxis(data, -1) + expected = np.moveaxis(data, -1, 0) self.assertArrayEqual(actual.data, expected) name = "percentile_over_time" coord = actual.coord(name) expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) + def test_multi_multiple_points_lazy(self): + # Check that lazy data is preserved. 
+ aggregator = PercentileAggregator() + percent = np.array([17, 29, 81]) + kwargs = dict(percent=percent) + shape = self.cube_multi.shape + percent.shape + data = da.arange(np.prod(shape)).reshape(shape) + coords = [self.coord_multi_0] + actual = aggregator.post_process( + self.cube_multi, data, coords, **kwargs + ) + self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) + self.assertTrue(actual.has_lazy_data()) + expected = np.moveaxis(as_concrete_data(data), -1, 0) + self.assertArrayEqual(actual.data, expected) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/analysis/test_SUM.py b/lib/iris/tests/unit/analysis/test_SUM.py index dd2dcf9f9c..64699b442f 100644 --- a/lib/iris/tests/unit/analysis/test_SUM.py +++ b/lib/iris/tests/unit/analysis/test_SUM.py @@ -9,6 +9,7 @@ # importing anything else. import iris.tests as tests # isort:skip +import dask.array as da import numpy as np import numpy.ma as ma @@ -91,6 +92,16 @@ def test_weights_and_returned(self): self.assertArrayEqual(data, [14, 9, 11, 13, 15]) self.assertArrayEqual(weights, [4, 2, 2, 2, 2]) + def test_masked_weights_and_returned(self): + array = ma.array( + self.cube_2d.data, mask=[[0, 0, 1, 0, 0], [0, 0, 0, 1, 0]] + ) + data, weights = SUM.aggregate( + array, axis=0, weights=self.weights, returned=True + ) + self.assertArrayEqual(data, [14, 9, 8, 4, 15]) + self.assertArrayEqual(weights, [4, 2, 1, 1, 2]) + class Test_lazy_weights_and_returned(tests.IrisTest): def setUp(self): @@ -128,6 +139,17 @@ def test_weights_and_returned(self): self.assertArrayEqual(lazy_data.compute(), [14, 9, 11, 13, 15]) self.assertArrayEqual(weights, [4, 2, 2, 2, 2]) + def test_masked_weights_and_returned(self): + array = da.ma.masked_array( + self.cube_2d.lazy_data(), mask=[[0, 0, 1, 0, 0], [0, 0, 0, 1, 0]] + ) + lazy_data, weights = SUM.lazy_aggregate( + array, axis=0, weights=self.weights, returned=True + ) + self.assertTrue(is_lazy_data(lazy_data)) + self.assertArrayEqual(lazy_data.compute(), [14, 9, 8, 4, 15]) + self.assertArrayEqual(weights, [4, 2, 1, 1, 2]) + class Test_aggregate_shape(tests.IrisTest): def test(self): diff --git a/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py b/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py new file mode 100644 index 0000000000..505a00df78 --- /dev/null +++ b/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py @@ -0,0 +1,150 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :data:`iris.analysis._axis_to_single_trailing` function.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +from unittest import mock + +import dask.array as da +import numpy as np + +from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data +from iris.analysis import _axis_to_single_trailing + + +class TestInputReshape(tests.IrisTest): + """Tests to make sure correct array is passed into stat function.""" + + def setUp(self): + self.stat_func = mock.Mock() + + def check_input(self, data, axis, expected): + """ + Given data and axis passed to the wrapped function, check that expected + array is passed to the inner function. 
+ + """ + wrapped_stat_func = _axis_to_single_trailing(self.stat_func) + wrapped_stat_func(data, axis=axis) + # Can't use Mock.assert_called_with because array equality is ambiguous + # get hold of the first arg instead. + self.assertArrayEqual(self.stat_func.call_args.args[0], expected) + + def test_1d_input(self): + # Trailing axis chosen, so array should be unchanged. + data = np.arange(5) + axis = 0 + self.check_input(data, axis, data) + + def test_2d_input_trailing(self): + # Trailing axis chosen, so array should be unchanged. + data = np.arange(6).reshape(2, 3) + axis = 1 + self.stat_func.return_value = np.empty(2) + self.check_input(data, axis, data) + + def test_2d_input_transpose(self): + # Leading axis chosen, so array should be transposed. + data = np.arange(6).reshape(2, 3) + axis = 0 + self.stat_func.return_value = np.empty(3) + self.check_input(data, axis, data.T) + + def test_3d_input_middle(self): + # Middle axis is chosen, should be moved to end. Other dims should be + # flattened. + data = np.arange(24).reshape(2, 3, 4) + axis = 1 + self.stat_func.return_value = np.empty(8) + expected = np.moveaxis(data, 1, 2).reshape(8, 3) + self.check_input(data, axis, expected) + + def test_3d_input_leading_multiple(self): + # First 2 axis chosen, should be flattened and moved to end. + data = np.arange(24).reshape(2, 3, 4) + axis = (0, 1) + self.stat_func.return_value = np.empty(4) + expected = np.moveaxis(data, 2, 0).reshape(4, 6) + self.check_input(data, axis, expected) + + def test_4d_first_and_last(self): + data = np.arange(120).reshape(2, 3, 4, 5) + axis = (0, -1) + self.stat_func.return_value = np.empty(12) + expected = np.moveaxis(data, 0, 2).reshape(12, 10) + self.check_input(data, axis, expected) + + def test_3d_input_leading_multiple_lazy(self): + # First 2 axis chosen, should be flattened and moved to end. Lazy data + # should be preserved. + data = np.arange(24).reshape(2, 3, 4) + lazy_data = as_lazy_data(data) + axis = (0, 1) + self.stat_func.return_value = np.empty(4) + expected = np.moveaxis(data, 2, 0).reshape(4, 6) + + wrapped_stat_func = _axis_to_single_trailing(self.stat_func) + wrapped_stat_func(lazy_data, axis=axis) + self.assertTrue(is_lazy_data(self.stat_func.call_args.args[0])) + self.assertArrayEqual( + as_concrete_data(self.stat_func.call_args.args[0]), expected + ) + + +class TestOutputReshape(tests.IrisTest): + """Tests to make sure array from stat function is handled correctly.""" + + def setUp(self): + self.stat_func = mock.Mock() + + def test_1d_input_1d_output(self): + # If array is fully aggregated, result should be same as returned by stat + # function. + data = np.arange(3) + self.stat_func.return_value = np.arange(2) + wrapped_stat_func = _axis_to_single_trailing(self.stat_func) + result = wrapped_stat_func(data, axis=0) + self.assertArrayEqual(result, self.stat_func.return_value) + + def test_3d_input_middle_single_stat(self): + # result shape should match non-aggregated input dims. + data = np.empty((2, 3, 4)) + axis = 1 + self.stat_func.return_value = np.arange(8) + expected = np.arange(8).reshape(2, 4) + wrapped_stat_func = _axis_to_single_trailing(self.stat_func) + result = wrapped_stat_func(data, axis=axis) + self.assertArrayEqual(result, expected) + + def test_3d_input_middle_single_stat_lazy(self): + # result shape should match non-aggregated input dims. Lazy data should + # be preserved. 
+ data = np.empty((2, 3, 4)) + axis = 1 + self.stat_func.return_value = da.arange(8) + expected = np.arange(8).reshape(2, 4) + wrapped_stat_func = _axis_to_single_trailing(self.stat_func) + result = wrapped_stat_func(data, axis=axis) + self.assertTrue(is_lazy_data(result)) + self.assertArrayEqual(as_concrete_data(result), expected) + + def test_3d_input_middle_multiple_stat(self): + # result shape should match non-aggregated input dims, plus trailing dim + # with size determined by the stat function. + data = np.empty((2, 3, 4)) + axis = 1 + self.stat_func.return_value = np.arange(8 * 5).reshape(8, 5) + expected = np.arange(40).reshape(2, 4, 5) + wrapped_stat_func = _axis_to_single_trailing(self.stat_func) + result = wrapped_stat_func(data, axis=axis) + self.assertArrayEqual(result, expected) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py index 038019611c..dad781ed74 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py @@ -181,5 +181,132 @@ def test_metadata(self): self.assertEqual(result, expected) +class TestLinear(tests.IrisTest): + # Test interpolation with 'linear' method. + # This is basically a wrapper to 'analysis._scipy_interpolate''s + # _RegularGridInterpolator. That has its own test, so we don't test the + # basic calculation exhaustively here. Instead we check the way it + # handles the source and result cubes (especially coordinates). + + def setUp(self): + cube = iris.tests.stock.simple_3d() + # Actually, this cube *isn't* terribly realistic, as the lat+lon coords + # have integer type, which in this case produces some peculiar results. + # Let's fix that (and not bother to test the peculiar behaviour). + for coord_name in ("longitude", "latitude"): + coord = cube.coord(coord_name) + coord.points = coord.points.astype(float) + self.test_cube = cube + # Set sample point to test single-point linear interpolation operation. + self.single_sample_point = [ + ("latitude", [9]), + ("longitude", [-120]), + ] + # Set expected results of single-point linear interpolation operation. + self.single_sample_result = np.array( + [ + 64 / 15, + 244 / 15, + ] + )[:, np.newaxis] + + def test_single_point_same_cube(self): + # Check exact result matching for a single point. + cube = self.test_cube + result = interpolate(cube, self.single_sample_point, method="linear") + # Check that the result is a single trajectory point, exactly equal to + # the expected part of the original data. + self.assertEqual(result.shape[-1], 1) + self.assertArrayAllClose(result.data, self.single_sample_result) + + def test_multi_point_same_cube(self): + # Check an exact result for multiple points. + cube = self.test_cube + # Use latitude selection to recreate a whole row of the original cube. + sample_points = [ + ("longitude", [-180, -90, 0, 90]), + ("latitude", [0, 0, 0, 0]), + ] + result = interpolate(cube, sample_points, method="linear") + + # The result should be identical to a single latitude section of the + # original, but with modified coords (latitude has 4 repeated zeros). + expected = cube[:, 1, :] + # Result 'longitude' is now an aux coord. + co_x = expected.coord("longitude") + expected.remove_coord(co_x) + expected.add_aux_coord(co_x, 1) + # Result 'latitude' is now an aux coord containing 4*[0]. 
+ expected.remove_coord("latitude") + co_y = AuxCoord( + [0, 0, 0, 0], standard_name="latitude", units="degrees" + ) + expected.add_aux_coord(co_y, 1) + self.assertEqual(result, expected) + + def test_aux_coord_noninterpolation_dim(self): + # Check exact result with an aux-coord mapped to an uninterpolated dim. + cube = self.test_cube + cube.add_aux_coord(DimCoord([17, 19], long_name="aux0"), 0) + + # The result cube should exactly equal a single source point. + result = interpolate(cube, self.single_sample_point, method="linear") + self.assertEqual(result.shape[-1], 1) + self.assertArrayAllClose(result.data, self.single_sample_result) + + def test_aux_coord_one_interp_dim(self): + # Check exact result with an aux-coord over one interpolation dims. + cube = self.test_cube + cube.add_aux_coord(AuxCoord([11, 12, 13, 14], long_name="aux_x"), 2) + + # The result cube should exactly equal a single source point. + result = interpolate(cube, self.single_sample_point, method="linear") + self.assertEqual(result.shape[-1], 1) + self.assertArrayAllClose(result.data, self.single_sample_result) + + def test_aux_coord_both_interp_dims(self): + # Check exact result with an aux-coord over both interpolation dims. + cube = self.test_cube + cube.add_aux_coord( + AuxCoord( + [[11, 12, 13, 14], [21, 22, 23, 24], [31, 32, 33, 34]], + long_name="aux_xy", + ), + (1, 2), + ) + + # The result cube should exactly equal a single source point. + result = interpolate(cube, self.single_sample_point, method="linear") + self.assertEqual(result.shape[-1], 1) + self.assertArrayAllClose(result.data, self.single_sample_result) + + def test_aux_coord_fail_mixed_dims(self): + # Check behaviour with an aux-coord mapped over both interpolation and + # non-interpolation dims : not supported. + cube = self.test_cube + cube.add_aux_coord( + AuxCoord( + [[111, 112, 113, 114], [211, 212, 213, 214]], + long_name="aux_0x", + ), + (0, 2), + ) + msg = "Coord aux_0x was expected to have new points of shape .*\\. Found shape of .*\\." + with self.assertRaisesRegex(ValueError, msg): + interpolate(cube, self.single_sample_point, method="linear") + + def test_metadata(self): + # Check exact result matching for a single point, with additional + # attributes and cell-methods. + cube = self.test_cube + cube.attributes["ODD_ATTR"] = "string-value-example" + cube.add_cell_method(iris.coords.CellMethod("mean", "area")) + result = interpolate(cube, self.single_sample_point, method="linear") + # Check that the result is a single trajectory point, exactly equal to + # the expected part of the original data. 
+ self.assertEqual(result.shape[-1], 1) + self.assertArrayAllClose(result.data, self.single_sample_result) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index efcbde8965..4ffeb7a67a 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -51,7 +51,7 @@ def _members(self): self.assertEqual(self.names(Metadata.__mro__), expected) emsg = ( "Can't instantiate abstract class .* with abstract " - "methods _members" + "method.* _members" ) with self.assertRaisesRegex(TypeError, emsg): _ = Metadata() diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 98643c8f10..840f65db01 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -15,7 +15,7 @@ from collections import namedtuple from copy import deepcopy import unittest.mock as mock -from unittest.mock import sentinel +from unittest.mock import Mock, sentinel from cf_units import Unit import numpy as np @@ -2086,8 +2086,13 @@ def setUp(self): # # src-to-tgt mapping: # 0->1, 1->2, 2->3 - self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2) - self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) + self.points = ( + sentinel.points_0, + sentinel.points_1, + sentinel.points_2, + sentinel.points_3, + ) + self.bounds = sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2 self.pb_0 = ( mock.Mock(copy=mock.Mock(return_value=self.points[0])), mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), @@ -2121,9 +2126,13 @@ def setUp(self): ) metadata = [self.src_metadata] * len(self.mapping) self.src_coords = [ - sentinel.src_coord_0, - sentinel.src_coord_1, - sentinel.src_coord_2, + # N.B. these need to mimic a Coord with points and bounds, and + # be of a class which is not-a-MeshCoord. + # NOTE: strictly, bounds should =above values, and support .copy(). + # For these tests, just omitting them works + is simpler. + Mock(spec=DimCoord, points=self.points[0], bounds=None), + Mock(spec=DimCoord, points=self.points[1], bounds=None), + Mock(spec=DimCoord, points=self.points[2], bounds=None), ] self.src_dims_common = [0, 1, 2] self.container = DimCoord @@ -2142,10 +2151,14 @@ def setUp(self): sentinel.tgt_metadata_3, ] self.tgt_coords = [ - sentinel.tgt_coord_0, - sentinel.tgt_coord_1, - sentinel.tgt_coord_2, - sentinel.tgt_coord_3, + # N.B. these need to mimic a Coord with points and bounds, and + # be of a class which is not-a-MeshCoord. + # NOTE: strictly, bounds should =above values, and support .copy(). + # For these tests, just omitting them works + is simpler. 
+ Mock(spec=DimCoord, points=self.points[0], bounds=None), + Mock(spec=DimCoord, points=self.points[1], bounds=None), + Mock(spec=DimCoord, points=self.points[2], bounds=None), + Mock(spec=DimCoord, points=self.points[3], bounds=None), ] self.tgt_dims_common = [1, 2, 3] self.tgt_dim_coverage = _DimCoverage( @@ -2275,7 +2288,12 @@ def setUp(self): # # src-to-tgt mapping: # 0->1, 1->2, 2->3 - self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2) + self.points = ( + sentinel.points_0, + sentinel.points_1, + sentinel.points_2, + sentinel.points_3, + ) self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) self.pb_0 = ( mock.Mock(copy=mock.Mock(return_value=self.points[0])), @@ -2318,9 +2336,13 @@ def setUp(self): ), ] self.src_coords = [ - sentinel.src_coord_0, - sentinel.src_coord_1, - sentinel.src_coord_2, + # N.B. these need to mimic a Coord with points and bounds, but also + # the type() defines the 'container' property of a prepared item. + # It seems that 'type()' is not fake-able in Python, so we need to + # provide *real* DimCoords, to match "self.container" below. + DimCoord(points=[0], bounds=None), + DimCoord(points=[1], bounds=None), + DimCoord(points=[2], bounds=None), ] self.src_dims = [(dim,) for dim in self.mapping.keys()] self.src_common_items = [ @@ -2329,10 +2351,14 @@ def setUp(self): ] self.tgt_metadata = [sentinel.tgt_metadata_0] + self.src_metadata self.tgt_coords = [ - sentinel.tgt_coord_0, - sentinel.tgt_coord_1, - sentinel.tgt_coord_2, - sentinel.tgt_coord_3, + # N.B. these need to mimic a Coord with points and bounds, but also + # the type() defines the 'container' property of a prepared item. + # It seems that 'type()' is not fake-able in Python, so we need to + # provide *real* DimCoords, to match "self.container" below. + DimCoord(points=[0], bounds=None), + DimCoord(points=[1], bounds=None), + DimCoord(points=[2], bounds=None), + DimCoord(points=[3], bounds=None), ] self.tgt_dims = [None] + [(dim,) for dim in self.mapping.values()] self.tgt_common_items = [ @@ -4624,6 +4650,11 @@ def setUp(self): self.resolve.prepared_category = prepared_category self.resolve.prepared_factories = prepared_factories + # Required to stop mock 'containers' failing in an 'issubclass' call. + self.patch( + "iris.common.resolve.issubclass", mock.Mock(return_value=False) + ) + def test_no_resolved_shape(self): self.resolve._broadcast_shape = None data = None diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index b3870a7901..cc20cdfa1f 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -24,7 +24,7 @@ def setUp(self): data = np.arange(nt, dtype=np.float32) cube = Cube(data, standard_name="air_temperature", units="K") # Temporal coordinate. 
- t_units = Unit("hours since 1970-01-01 00:00:00", calendar="gregorian") + t_units = Unit("hours since 1970-01-01 00:00:00", calendar="standard") t_coord = DimCoord( points=np.arange(nt), standard_name="time", units=t_units ) diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 2af568f077..96d13d7d15 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -30,7 +30,7 @@ def simple_1d_time_cubes(self, reftimes, coords_points): standard_name="air_temperature", units="K", ) - unit = cf_units.Unit(reftime, calendar="gregorian") + unit = cf_units.Unit(reftime, calendar="standard") coord = iris.coords.DimCoord( points=np.array(coord_points, dtype=np.float32), standard_name="time", @@ -58,7 +58,7 @@ def setUp(self): cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") # Time coord t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" + "hours since 1970-01-01 00:00:00", calendar="standard" ) t_coord = iris.coords.DimCoord( points=np.arange(2, dtype=np.float32), diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py index b7c59ff566..0c20f16f5a 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py @@ -36,7 +36,7 @@ def test_vectorise_call(self): # The reason we use numpy.vectorize is to support multi-dimensional # coordinate points. def fn(coord, v): - return v ** 2 + return v**2 with mock.patch( "numpy.vectorize", return_value=self.vectorised diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index 86230c84b9..418ac72557 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -32,7 +32,7 @@ def setUp(self): time_coord = iris.coords.DimCoord( hour_numbers, standard_name="time", - units=cf_units.Unit("hours since epoch", "gregorian"), + units=cf_units.Unit("hours since epoch", "standard"), ) cube.add_dim_coord(time_coord, 0) diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py index 33efaef9da..ba04c77d57 100644 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ b/lib/iris/tests/unit/coord_systems/test_Mercator.py @@ -29,7 +29,9 @@ def test_repr(self): "Mercator(longitude_of_projection_origin=90.0, " "ellipsoid=GeogCS(semi_major_axis=6377563.396, " "semi_minor_axis=6356256.909), " - "standard_parallel=0.0)" + "standard_parallel=0.0, " + "scale_factor_at_projection_origin=None, " + "false_easting=0.0, false_northing=0.0)" ) self.assertEqual(expected, repr(self.tm)) @@ -38,16 +40,31 @@ class Test_init_defaults(tests.IrisTest): def test_set_optional_args(self): # Check that setting the optional (non-ellipse) args works. 
crs = Mercator( - longitude_of_projection_origin=27, standard_parallel=157.4 + longitude_of_projection_origin=27, + standard_parallel=157.4, + false_easting=13, + false_northing=12, ) self.assertEqualAndKind(crs.longitude_of_projection_origin, 27.0) self.assertEqualAndKind(crs.standard_parallel, 157.4) + self.assertEqualAndKind(crs.false_easting, 13.0) + self.assertEqualAndKind(crs.false_northing, 12.0) + + def test_set_optional_scale_factor_alternative(self): + # Check that setting the optional (non-ellipse) args works. + crs = Mercator( + scale_factor_at_projection_origin=1.3, + ) + self.assertEqualAndKind(crs.scale_factor_at_projection_origin, 1.3) def _check_crs_defaults(self, crs): # Check for property defaults when no kwargs options were set. # NOTE: except ellipsoid, which is done elsewhere. self.assertEqualAndKind(crs.longitude_of_projection_origin, 0.0) self.assertEqualAndKind(crs.standard_parallel, 0.0) + self.assertEqualAndKind(crs.false_easting, 0.0) + self.assertEqualAndKind(crs.false_northing, 0.0) + self.assertEqualAndKind(crs.scale_factor_at_projection_origin, None) def test_no_optional_args(self): # Check expected defaults with no optional args. @@ -57,7 +74,11 @@ def test_no_optional_args(self): def test_optional_args_None(self): # Check expected defaults with optional args=None. crs = Mercator( - longitude_of_projection_origin=None, standard_parallel=None + longitude_of_projection_origin=None, + standard_parallel=None, + scale_factor_at_projection_origin=None, + false_easting=None, + false_northing=None, ) self._check_crs_defaults(crs) @@ -77,6 +98,8 @@ def test_extra_kwargs(self): # converted to a cartopy CRS. longitude_of_projection_origin = 90.0 true_scale_lat = 14.0 + false_easting = 13 + false_northing = 12 ellipsoid = GeogCS( semi_major_axis=6377563.396, semi_minor_axis=6356256.909 ) @@ -85,6 +108,8 @@ def test_extra_kwargs(self): longitude_of_projection_origin, ellipsoid=ellipsoid, standard_parallel=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) expected = ccrs.Mercator( @@ -95,6 +120,33 @@ def test_extra_kwargs(self): ellipse=None, ), latitude_true_scale=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, + ) + + res = merc_cs.as_cartopy_crs() + self.assertEqual(res, expected) + + def test_extra_kwargs_scale_factor_alternative(self): + # Check that a projection with non-default values is correctly + # converted to a cartopy CRS. 
+ scale_factor_at_projection_origin = 1.3 + ellipsoid = GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) + + merc_cs = Mercator( + ellipsoid=ellipsoid, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ) + + expected = ccrs.Mercator( + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + scale_factor=scale_factor_at_projection_origin, ) res = merc_cs.as_cartopy_crs() @@ -113,6 +165,8 @@ def test_simple(self): def test_extra_kwargs(self): longitude_of_projection_origin = 90.0 true_scale_lat = 14.0 + false_easting = 13 + false_northing = 12 ellipsoid = GeogCS( semi_major_axis=6377563.396, semi_minor_axis=6356256.909 ) @@ -121,6 +175,8 @@ def test_extra_kwargs(self): longitude_of_projection_origin, ellipsoid=ellipsoid, standard_parallel=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) expected = ccrs.Mercator( @@ -131,6 +187,31 @@ def test_extra_kwargs(self): ellipse=None, ), latitude_true_scale=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, + ) + + res = merc_cs.as_cartopy_projection() + self.assertEqual(res, expected) + + def test_extra_kwargs_scale_factor_alternative(self): + ellipsoid = GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) + scale_factor_at_projection_origin = 1.3 + + merc_cs = Mercator( + ellipsoid=ellipsoid, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ) + + expected = ccrs.Mercator( + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + scale_factor=scale_factor_at_projection_origin, ) res = merc_cs.as_cartopy_projection() diff --git a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py new file mode 100755 index 0000000000..25f5d24800 --- /dev/null +++ b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py @@ -0,0 +1,251 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :class:`iris.coord_systems.PolarStereographic` class.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip + +import cartopy.crs as ccrs + +from iris.coord_systems import GeogCS, PolarStereographic + + +class Test_PolarStereographic__basics(tests.IrisTest): + def setUp(self): + self.ps_blank = PolarStereographic( + central_lat=90.0, + central_lon=0, + ellipsoid=GeogCS(6377563.396, 6356256.909), + ) + self.ps_standard_parallel = PolarStereographic( + central_lat=90.0, + central_lon=0, + true_scale_lat=30, + ellipsoid=GeogCS(6377563.396, 6356256.909), + ) + self.ps_scale_factor = PolarStereographic( + central_lat=90.0, + central_lon=0, + scale_factor_at_projection_origin=1.1, + ellipsoid=GeogCS(6377563.396, 6356256.909), + ) + + def test_construction(self): + self.assertXMLElement( + self.ps_blank, ("coord_systems", "PolarStereographic.xml") + ) + + def test_construction_sp(self): + self.assertXMLElement( + self.ps_standard_parallel, + ("coord_systems", "PolarStereographicStandardParallel.xml"), + ) + + def test_construction_sf(self): + self.assertXMLElement( + self.ps_scale_factor, + ("coord_systems", "PolarStereographicScaleFactor.xml"), + ) + + def test_repr_blank(self): + expected = ( + "PolarStereographic(central_lat=90.0, central_lon=0.0, " + "false_easting=0.0, false_northing=0.0, " + "true_scale_lat=None, " + "ellipsoid=GeogCS(semi_major_axis=6377563.396, " + "semi_minor_axis=6356256.909))" + ) + self.assertEqual(expected, repr(self.ps_blank)) + + def test_repr_standard_parallel(self): + expected = ( + "PolarStereographic(central_lat=90.0, central_lon=0.0, " + "false_easting=0.0, false_northing=0.0, " + "true_scale_lat=30.0, " + "ellipsoid=GeogCS(semi_major_axis=6377563.396, " + "semi_minor_axis=6356256.909))" + ) + self.assertEqual(expected, repr(self.ps_standard_parallel)) + + def test_repr_scale_factor(self): + expected = ( + "PolarStereographic(central_lat=90.0, central_lon=0.0, " + "false_easting=0.0, false_northing=0.0, " + "scale_factor_at_projection_origin=1.1, " + "ellipsoid=GeogCS(semi_major_axis=6377563.396, " + "semi_minor_axis=6356256.909))" + ) + self.assertEqual(expected, repr(self.ps_scale_factor)) + + +class Test_init_defaults(tests.IrisTest): + def test_set_optional_args(self): + # Check that setting the optional (non-ellipse) args works. + crs = PolarStereographic( + central_lat=90, + central_lon=50, + false_easting=13, + false_northing=12, + true_scale_lat=32, + ) + self.assertEqualAndKind(crs.central_lat, 90.0) + self.assertEqualAndKind(crs.central_lon, 50.0) + self.assertEqualAndKind(crs.false_easting, 13.0) + self.assertEqualAndKind(crs.false_northing, 12.0) + self.assertEqualAndKind(crs.true_scale_lat, 32.0) + + def test_set_optional_scale_factor_alternative(self): + # Check that setting the optional (non-ellipse) args works. + crs = PolarStereographic( + central_lat=-90, + central_lon=50, + false_easting=13, + false_northing=12, + scale_factor_at_projection_origin=3.1, + ) + self.assertEqualAndKind(crs.central_lat, -90.0) + self.assertEqualAndKind(crs.central_lon, 50.0) + self.assertEqualAndKind(crs.false_easting, 13.0) + self.assertEqualAndKind(crs.false_northing, 12.0) + self.assertEqualAndKind(crs.scale_factor_at_projection_origin, 3.1) + + def _check_crs_defaults(self, crs): + # Check for property defaults when no kwargs options were set. + # NOTE: except ellipsoid, which is done elsewhere. 
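+        # The false easting/northing default to floats (0.0), whereas the
+        # two alternative scale settings are simply left unset (None):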
+ self.assertEqualAndKind(crs.false_easting, 0.0) + self.assertEqualAndKind(crs.false_northing, 0.0) + self.assertEqualAndKind(crs.true_scale_lat, None) + self.assertEqualAndKind(crs.scale_factor_at_projection_origin, None) + + def test_no_optional_args(self): + # Check expected defaults with no optional args. + crs = PolarStereographic( + central_lat=-90, + central_lon=50, + ) + self._check_crs_defaults(crs) + + def test_optional_args_None(self): + # Check expected defaults with optional args=None. + crs = PolarStereographic( + central_lat=-90, + central_lon=50, + true_scale_lat=None, + scale_factor_at_projection_origin=None, + false_easting=None, + false_northing=None, + ) + self._check_crs_defaults(crs) + + +class AsCartopyMixin: + def test_simple(self): + # Check that a projection set up with all the defaults is correctly + # converted to a cartopy CRS. + central_lat = -90 + central_lon = 50 + polar_cs = PolarStereographic( + central_lat=central_lat, + central_lon=central_lon, + ) + res = self.as_cartopy_method(polar_cs) + expected = ccrs.Stereographic( + central_latitude=central_lat, + central_longitude=central_lon, + globe=ccrs.Globe(), + ) + self.assertEqual(res, expected) + + def test_extra_kwargs_scale_factor(self): + # Check that a projection with non-default values is correctly + # converted to a cartopy CRS. + central_lat = -90 + central_lon = 50 + scale_factor_at_projection_origin = 1.3 + false_easting = 13 + false_northing = 15 + ellipsoid = GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) + + polar_cs = PolarStereographic( + central_lat=central_lat, + central_lon=central_lon, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=ellipsoid, + ) + + expected = ccrs.Stereographic( + central_latitude=central_lat, + central_longitude=central_lon, + false_easting=false_easting, + false_northing=false_northing, + scale_factor=scale_factor_at_projection_origin, + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + ) + + res = self.as_cartopy_method(polar_cs) + self.assertEqual(res, expected) + + def test_extra_kwargs_true_scale_lat_alternative(self): + # Check that a projection with non-default values is correctly + # converted to a cartopy CRS. 
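+        # (This variant supplies `true_scale_lat`, which should surface as
+        # the `true_scale_latitude` keyword of the expected cartopy
+        # projection constructed below.)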
+ central_lat = -90 + central_lon = 50 + true_scale_lat = 80 + false_easting = 13 + false_northing = 15 + ellipsoid = GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) + + polar_cs = PolarStereographic( + central_lat=central_lat, + central_lon=central_lon, + true_scale_lat=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=ellipsoid, + ) + + expected = ccrs.Stereographic( + central_latitude=central_lat, + central_longitude=central_lon, + false_easting=false_easting, + false_northing=false_northing, + true_scale_latitude=true_scale_lat, + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + ) + + res = self.as_cartopy_method(polar_cs) + self.assertEqual(res, expected) + + +class Test_PolarStereographic__as_cartopy_crs(tests.IrisTest, AsCartopyMixin): + def setUp(self): + self.as_cartopy_method = PolarStereographic.as_cartopy_crs + + +class Test_PolarStereographic__as_cartopy_projection( + tests.IrisTest, AsCartopyMixin +): + def setUp(self): + self.as_cartopy_method = PolarStereographic.as_cartopy_projection + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_Stereographic.py b/lib/iris/tests/unit/coord_systems/test_Stereographic.py index fac411f9d5..acd77112c1 100644 --- a/lib/iris/tests/unit/coord_systems/test_Stereographic.py +++ b/lib/iris/tests/unit/coord_systems/test_Stereographic.py @@ -9,12 +9,29 @@ # importing anything else. import iris.tests as tests # isort:skip -from iris.coord_systems import Stereographic +import cartopy.crs as ccrs + +from iris.coord_systems import GeogCS, Stereographic + + +def stereo(**kwargs): + return Stereographic( + central_lat=-90, + central_lon=-45, + false_easting=100, + false_northing=200, + ellipsoid=GeogCS(6377563.396, 6356256.909), + **kwargs, + ) + + +class Test_Stereographic_construction(tests.IrisTest): + def test_stereo(self): + st = stereo() + self.assertXMLElement(st, ("coord_systems", "Stereographic.xml")) class Test_init_defaults(tests.IrisTest): - # NOTE: most of the testing for Stereographic is in the legacy test module - # 'iris.tests.test_coordsystem'. # This class *only* tests the defaults for optional constructor args. def test_set_optional_args(self): @@ -26,12 +43,26 @@ def test_set_optional_args(self): self.assertEqualAndKind(crs.false_northing, -203.7) self.assertEqualAndKind(crs.true_scale_lat, 77.0) + def test_set_optional_args_scale_factor_alternative(self): + # Check that setting the optional (non-ellipse) args works. + crs = Stereographic( + 0, + 0, + false_easting=100, + false_northing=-203.7, + scale_factor_at_projection_origin=1.3, + ) + self.assertEqualAndKind(crs.false_easting, 100.0) + self.assertEqualAndKind(crs.false_northing, -203.7) + self.assertEqualAndKind(crs.scale_factor_at_projection_origin, 1.3) + def _check_crs_defaults(self, crs): # Check for property defaults when no kwargs options were set. # NOTE: except ellipsoid, which is done elsewhere. self.assertEqualAndKind(crs.false_easting, 0.0) self.assertEqualAndKind(crs.false_northing, 0.0) self.assertIsNone(crs.true_scale_lat) + self.assertIsNone(crs.scale_factor_at_projection_origin) def test_no_optional_args(self): # Check expected defaults with no optional args. @@ -41,10 +72,141 @@ def test_no_optional_args(self): def test_optional_args_None(self): # Check expected defaults with optional args=None. 
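        # Explicitly passing None for each optional kwarg should give the
        # same defaults as omitting the kwargs altogether: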
crs = Stereographic( - 0, 0, false_easting=None, false_northing=None, true_scale_lat=None + 0, + 0, + false_easting=None, + false_northing=None, + true_scale_lat=None, + scale_factor_at_projection_origin=None, ) self._check_crs_defaults(crs) +class Test_Stereographic_repr(tests.IrisTest): + def test_stereo(self): + st = stereo() + expected = ( + "Stereographic(central_lat=-90.0, central_lon=-45.0, " + "false_easting=100.0, false_northing=200.0, true_scale_lat=None, " + "ellipsoid=GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909))" + ) + self.assertEqual(expected, repr(st)) + + def test_stereo_scale_factor(self): + st = stereo(scale_factor_at_projection_origin=0.9) + expected = ( + "Stereographic(central_lat=-90.0, central_lon=-45.0, " + "false_easting=100.0, false_northing=200.0, " + "scale_factor_at_projection_origin=0.9, " + "ellipsoid=GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909))" + ) + self.assertEqual(expected, repr(st)) + + +class AsCartopyMixin: + def test_basic(self): + latitude_of_projection_origin = -90.0 + longitude_of_projection_origin = -45.0 + false_easting = 100.0 + false_northing = 200.0 + ellipsoid = GeogCS(6377563.396, 6356256.909) + + st = Stereographic( + central_lat=latitude_of_projection_origin, + central_lon=longitude_of_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=ellipsoid, + ) + expected = ccrs.Stereographic( + central_latitude=latitude_of_projection_origin, + central_longitude=longitude_of_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + ) + + res = self.as_cartopy_method(st) + self.assertEqual(res, expected) + + def test_true_scale_lat(self): + latitude_of_projection_origin = -90.0 + longitude_of_projection_origin = -45.0 + false_easting = 100.0 + false_northing = 200.0 + true_scale_lat = 30 + ellipsoid = GeogCS(6377563.396, 6356256.909) + + st = Stereographic( + central_lat=latitude_of_projection_origin, + central_lon=longitude_of_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + true_scale_lat=true_scale_lat, + ellipsoid=ellipsoid, + ) + expected = ccrs.Stereographic( + central_latitude=latitude_of_projection_origin, + central_longitude=longitude_of_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + true_scale_latitude=true_scale_lat, + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + ) + + res = self.as_cartopy_method(st) + self.assertEqual(res, expected) + + def test_scale_factor(self): + latitude_of_projection_origin = -90.0 + longitude_of_projection_origin = -45.0 + false_easting = 100.0 + false_northing = 200.0 + scale_factor_at_projection_origin = 0.9 + ellipsoid = GeogCS(6377563.396, 6356256.909) + + st = Stereographic( + central_lat=latitude_of_projection_origin, + central_lon=longitude_of_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ellipsoid=ellipsoid, + ) + expected = ccrs.Stereographic( + central_latitude=latitude_of_projection_origin, + central_longitude=longitude_of_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + scale_factor=scale_factor_at_projection_origin, + globe=ccrs.Globe( + semimajor_axis=6377563.396, + semiminor_axis=6356256.909, + ellipse=None, + ), + ) + 
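+        # The comparison below uses cartopy's CRS equality, which compares
+        # the underlying projection definitions rather than object identity.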
+ res = self.as_cartopy_method(st) + self.assertEqual(res, expected) + + +class Test_Stereographic_as_cartopy_crs(tests.IrisTest, AsCartopyMixin): + def setUp(self): + self.as_cartopy_method = Stereographic.as_cartopy_crs + + +class Test_Stereographic_as_cartopy_projection(tests.IrisTest, AsCartopyMixin): + def setUp(self): + self.as_cartopy_method = Stereographic.as_cartopy_projection + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index 4d520ac414..75b6250449 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -458,7 +458,7 @@ def test_time_values(self): [ ( "AncillaryVariable : time of previous valid detection / " - "(hours since 1970-01-01 01:00, gregorian calendar)" + "(hours since 1970-01-01 01:00, standard calendar)" ), ( " data: [1970-01-01 03:00:00, 1970-01-01 06:00:00, " diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 43170b6c4e..dca6ed3c1b 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -74,6 +74,11 @@ def test_scalar(self): target = [0, 0, 0, 0, 0] self._test_nearest_neighbour_index(target) + def test_bounded_float_point(self): + coord = DimCoord(1, bounds=[0, 2]) + result = coord.nearest_neighbour_index(2.5) + self.assertEqual(result, 0) + class Test_nearest_neighbour_index__descending(tests.IrisTest): def setUp(self): @@ -327,7 +332,8 @@ def test_dim_1d(self): ) for units in ["unknown", "no_unit", 1, "K"]: coord.units = units - collapsed_coord = coord.collapsed() + with self.assertNoWarningsRegexp(): + collapsed_coord = coord.collapsed() self.assertArrayEqual( collapsed_coord.points, np.mean(coord.points) ) @@ -469,6 +475,98 @@ def test_lazy_nd_points_and_bounds(self): self.assertArrayEqual(collapsed_coord.points, da.array([55])) self.assertArrayEqual(collapsed_coord.bounds, da.array([[-2, 112]])) + def test_numeric_nd_multidim_bounds_warning(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_real, long_name="y") + + msg = ( + "Collapsing a multi-dimensional coordinate. " + "Metadata may not be fully descriptive for 'y'." + ) + with self.assertWarnsRegex(UserWarning, msg): + coord.collapsed() + + def test_lazy_nd_multidim_bounds_warning(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy, long_name="y") + + msg = ( + "Collapsing a multi-dimensional coordinate. " + "Metadata may not be fully descriptive for 'y'." + ) + with self.assertWarnsRegex(UserWarning, msg): + coord.collapsed() + + def test_numeric_nd_noncontiguous_bounds_warning(self): + self.setupTestArrays((3)) + coord = AuxCoord(self.pts_real, bounds=self.bds_real, long_name="y") + + msg = ( + "Collapsing a non-contiguous coordinate. " + "Metadata may not be fully descriptive for 'y'." + ) + with self.assertWarnsRegex(UserWarning, msg): + coord.collapsed() + + def test_lazy_nd_noncontiguous_bounds_warning(self): + self.setupTestArrays((3)) + coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy, long_name="y") + + msg = ( + "Collapsing a non-contiguous coordinate. " + "Metadata may not be fully descriptive for 'y'." 
+ ) + with self.assertWarnsRegex(UserWarning, msg): + coord.collapsed() + + def test_numeric_3_bounds(self): + + points = np.array([2.0, 6.0, 4.0]) + bounds = np.array([[1.0, 0.0, 3.0], [5.0, 4.0, 7.0], [3.0, 2.0, 5.0]]) + + coord = AuxCoord(points, bounds=bounds, long_name="x") + + msg = ( + r"Cannot check if coordinate is contiguous: Invalid operation for " + r"'x', with 3 bound\(s\). Contiguous bounds are only defined for " + r"1D coordinates with 2 bounds. Metadata may not be fully " + r"descriptive for 'x'. Ignoring bounds." + ) + with self.assertWarnsRegex(UserWarning, msg): + collapsed_coord = coord.collapsed() + + self.assertFalse(collapsed_coord.has_lazy_points()) + self.assertFalse(collapsed_coord.has_lazy_bounds()) + + self.assertArrayAlmostEqual(collapsed_coord.points, np.array([4.0])) + self.assertArrayAlmostEqual( + collapsed_coord.bounds, np.array([[2.0, 6.0]]) + ) + + def test_lazy_3_bounds(self): + + points = da.arange(3) * 2.0 + bounds = da.arange(3 * 3).reshape(3, 3) + + coord = AuxCoord(points, bounds=bounds, long_name="x") + + msg = ( + r"Cannot check if coordinate is contiguous: Invalid operation for " + r"'x', with 3 bound\(s\). Contiguous bounds are only defined for " + r"1D coordinates with 2 bounds. Metadata may not be fully " + r"descriptive for 'x'. Ignoring bounds." + ) + with self.assertWarnsRegex(UserWarning, msg): + collapsed_coord = coord.collapsed() + + self.assertTrue(collapsed_coord.has_lazy_points()) + self.assertTrue(collapsed_coord.has_lazy_bounds()) + + self.assertArrayAlmostEqual(collapsed_coord.points, da.array([2.0])) + self.assertArrayAlmostEqual( + collapsed_coord.bounds, da.array([[0.0, 4.0]]) + ) + class Test_is_compatible(tests.IrisTest): def setUp(self): @@ -885,7 +983,7 @@ def test_short_time_interval(self): ) expected = "\n".join( [ - "DimCoord : time / (days since 1970-01-01, gregorian calendar)", + "DimCoord : time / (days since 1970-01-01, standard calendar)", " points: [1970-01-06 00:00:00]", " shape: (1,)", " dtype: int64", @@ -902,7 +1000,7 @@ def test_short_time_interval__bounded(self): coord.guess_bounds() expected = "\n".join( [ - "DimCoord : time / (days since 1970-01-01, gregorian calendar)", + "DimCoord : time / (days since 1970-01-01, standard calendar)", " points: [1970-01-06 00:00:00, 1970-01-07 00:00:00]", " bounds: [", " [1970-01-05 12:00:00, 1970-01-06 12:00:00],", @@ -921,7 +1019,7 @@ def test_long_time_interval(self): ) expected = "\n".join( [ - "DimCoord : time / (years since 1970-01-01, gregorian calendar)", + "DimCoord : time / (years since 1970-01-01, standard calendar)", " points: [5]", " shape: (1,)", " dtype: int64", @@ -938,7 +1036,7 @@ def test_long_time_interval__bounded(self): coord.guess_bounds() expected = "\n".join( [ - "DimCoord : time / (years since 1970-01-01, gregorian calendar)", + "DimCoord : time / (years since 1970-01-01, standard calendar)", " points: [5, 6]", " bounds: [", " [4.5, 5.5],", @@ -1059,7 +1157,7 @@ class Test___init____abstractmethod(tests.IrisTest): def test(self): emsg = ( "Can't instantiate abstract class Coord with abstract" - " methods __init__" + " method.* __init__" ) with self.assertRaisesRegex(TypeError, emsg): _ = Coord(points=[0, 1]) diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py index e10d228ef6..4298b140ea 100644 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ b/lib/iris/tests/unit/coords/test_DimCoord.py @@ -609,6 +609,18 @@ def test_copy_array(self): bnds[1, 1] = 10 self.assertEqual(coord.bounds[1, 1], 5) + 
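+    # The two tests below check that DimCoord normalises each bounds pair
+    # to run in the same direction as the points: [upper, lower] pairs are
+    # flipped to [lower, upper] for increasing points, and vice versa.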
def test_flip_contiguous(self): + pts = np.arange(4) + bnds = np.transpose([np.arange(1, 5), np.arange(4)]) + coord = DimCoord(pts, bounds=bnds) + self.assertArrayEqual(coord.bounds, bnds[:, ::-1]) + + def test_flip_contiguous_decreasing(self): + pts = np.arange(4, 0, -1) + bnds = np.transpose([np.arange(4, 0, -1), np.arange(5, 1, -1)]) + coord = DimCoord(pts, bounds=bnds) + self.assertArrayEqual(coord.bounds, bnds[:, ::-1]) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index fd10a6f264..f9316ff92c 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -511,7 +511,7 @@ def test_onepoint_toolong_placeholder(self): result = self.coord_representations(shape=(2,), dates=True) expected = [ "", - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + "AuxCoord : x / (days since 1970-03-5, standard calendar)", " points: [1970-03-05 00:00:00, 1970-03-06 00:00:00]", " shape: (2,)", " dtype: float64", @@ -540,7 +540,7 @@ def test_dates_scalar(self): ), ( "AuxCoord : time / (hours since 2025-03-23 01:00:00, " - "gregorian calendar)" + "standard calendar)" ), " points: [2025-03-23 01:00:00]", " shape: (1,)", @@ -553,7 +553,7 @@ def test_dates_bounds(self): result = self.coord_representations(dates=True, bounded=True) expected = [ "", - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + "AuxCoord : x / (days since 1970-03-5, standard calendar)", " points: [", " 1970-03-05 00:00:00, 1970-03-06 00:00:00,", " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", @@ -574,7 +574,7 @@ def test_dates_masked(self): result = self.coord_representations(dates=True, masked=True) expected = [ "", - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + "AuxCoord : x / (days since 1970-03-5, standard calendar)", " points: [", " 1970-03-05 00:00:00, -- ,", " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", @@ -749,7 +749,7 @@ def test_climatological(self): ), ( "DimCoord : time / (days since 1970-01-01 00:00:00-00, " - "gregorian calendar)" + "standard calendar)" ), " points: [2001-01-10 00:00:00]", " bounds: [[2001-01-10 00:00:00, 2011-01-10 00:00:00]]", @@ -1054,7 +1054,7 @@ def test_convert_dates(self): coord = self.sample_coord(dates=True) result = coord.summary() expected = [ - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + "AuxCoord : x / (days since 1970-03-5, standard calendar)", " points: [", ( " 1970-03-05 00:00:00, 1970-03-06 00:00:00, " @@ -1069,7 +1069,7 @@ def test_convert_dates(self): result = coord.summary(convert_dates=False) expected = [ - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + "AuxCoord : x / (days since 1970-03-5, standard calendar)", " points: [0., 1., 2., 3., 4.]", " shape: (5,)", " dtype: float64", diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 7d56b505bd..f38d6ef35d 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -15,6 +15,7 @@ from cf_units import Unit import numpy as np import numpy.ma as ma +import pytest from iris._lazy_data import as_lazy_data import iris.analysis @@ -564,6 +565,67 @@ def test_no_lat_weighted_aggregator_mixed(self): self._assert_nowarn_collapse_without_weight(coords, warn) +class Test_collapsed_coord_with_3_bounds(tests.IrisTest): + def setUp(self): + self.cube = Cube([1, 2]) + + bounds = [[0.0, 1.0, 2.0], [2.0, 3.0, 4.0]] 
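+        # Three bounds per cell: contiguity is undefined, so collapsing
+        # should warn, ignore the bounds, and derive the collapsed bounds
+        # from the points instead (see the expected [[1.0, 2.0]] below).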
+        lat = AuxCoord([1.0, 2.0], bounds=bounds, standard_name="latitude")
+        lon = AuxCoord([1.0, 2.0], bounds=bounds, standard_name="longitude")
+
+        self.cube.add_aux_coord(lat, 0)
+        self.cube.add_aux_coord(lon, 0)
+
+    def _assert_warn_cannot_check_contiguity(self, warn):
+        # Ensure that warning is raised.
+        for coord in ["latitude", "longitude"]:
+            msg = (
+                f"Cannot check if coordinate is contiguous: Invalid "
+                f"operation for '{coord}', with 3 bound(s). Contiguous "
+                f"bounds are only defined for 1D coordinates with 2 "
+                f"bounds. Metadata may not be fully descriptive for "
+                f"'{coord}'. Ignoring bounds."
+            )
+            self.assertIn(mock.call(msg), warn.call_args_list)
+
+    def _assert_cube_as_expected(self, cube):
+        """Ensure that cube data and coordinates are as expected."""
+        self.assertArrayEqual(cube.data, np.array(3))
+
+        lat = cube.coord("latitude")
+        self.assertArrayAlmostEqual(lat.points, np.array([1.5]))
+        self.assertArrayAlmostEqual(lat.bounds, np.array([[1.0, 2.0]]))
+
+        lon = cube.coord("longitude")
+        self.assertArrayAlmostEqual(lon.points, np.array([1.5]))
+        self.assertArrayAlmostEqual(lon.bounds, np.array([[1.0, 2.0]]))
+
+    def test_collapsed_lat_with_3_bounds(self):
+        """Collapse latitude with 3 bounds."""
+        with mock.patch("warnings.warn") as warn:
+            collapsed_cube = self.cube.collapsed("latitude", iris.analysis.SUM)
+        self._assert_warn_cannot_check_contiguity(warn)
+        self._assert_cube_as_expected(collapsed_cube)
+
+    def test_collapsed_lon_with_3_bounds(self):
+        """Collapse longitude with 3 bounds."""
+        with mock.patch("warnings.warn") as warn:
+            collapsed_cube = self.cube.collapsed(
+                "longitude", iris.analysis.SUM
+            )
+        self._assert_warn_cannot_check_contiguity(warn)
+        self._assert_cube_as_expected(collapsed_cube)
+
+    def test_collapsed_lat_lon_with_3_bounds(self):
+        """Collapse latitude and longitude with 3 bounds."""
+        with mock.patch("warnings.warn") as warn:
+            collapsed_cube = self.cube.collapsed(
+                ["latitude", "longitude"], iris.analysis.SUM
+            )
+        self._assert_warn_cannot_check_contiguity(warn)
+        self._assert_cube_as_expected(collapsed_cube)
+
+
 class Test_summary(tests.IrisTest):
     def setUp(self):
         self.cube = Cube(0)
@@ -676,294 +738,6 @@ def test_different_array_attrs_incompatible(self):
         self.assertFalse(self.test_cube.is_compatible(self.other_cube))


-class Test_aggregated_by(tests.IrisTest):
-    def setUp(self):
-        self.cube = Cube(np.arange(44).reshape(4, 11))
-
-        val_coord = AuxCoord(
-            [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val"
-        )
-        label_coord = AuxCoord(
-            [
-                "alpha",
-                "alpha",
-                "beta",
-                "beta",
-                "alpha",
-                "gamma",
-                "alpha",
-                "alpha",
-                "alpha",
-                "gamma",
-                "beta",
-            ],
-            long_name="label",
-            units="no_unit",
-        )
-        simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg")
-        spanning_coord = AuxCoord(
-            np.arange(44).reshape(4, 11), long_name="spanning"
-        )
-        spanning_label_coord = AuxCoord(
-            np.arange(1, 441, 10).reshape(4, 11).astype(str),
-            long_name="span_label",
-            units="no_unit",
-        )
-
-        self.cube.add_aux_coord(simple_agg_coord, 0)
-        self.cube.add_aux_coord(val_coord, 1)
-        self.cube.add_aux_coord(label_coord, 1)
-        self.cube.add_aux_coord(spanning_coord, (0, 1))
-        self.cube.add_aux_coord(spanning_label_coord, (0, 1))
-
-        self.mock_agg = mock.Mock(spec=Aggregator)
-        self.mock_agg.cell_method = []
-        self.mock_agg.aggregate = mock.Mock(
-            return_value=mock.Mock(dtype="object")
-        )
-        self.mock_agg.aggregate_shape = mock.Mock(return_value=())
-        self.mock_agg.lazy_func = None
-        self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y,
z: x) - - self.ancillary_variable = AncillaryVariable( - [0, 1, 2, 3], long_name="foo" - ) - self.cube.add_ancillary_variable(self.ancillary_variable, 0) - self.cell_measure = CellMeasure([0, 1, 2, 3], long_name="bar") - self.cube.add_cell_measure(self.cell_measure, 0) - - def test_2d_coord_simple_agg(self): - # For 2d coords, slices of aggregated coord should be the same as - # aggregated slices. - res_cube = self.cube.aggregated_by("simple_agg", self.mock_agg) - for res_slice, cube_slice in zip( - res_cube.slices("simple_agg"), self.cube.slices("simple_agg") - ): - cube_slice_agg = cube_slice.aggregated_by( - "simple_agg", self.mock_agg - ) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - self.assertEqual( - res_slice.coord("span_label"), - cube_slice_agg.coord("span_label"), - ) - - def test_agg_by_label(self): - # Aggregate a cube on a string coordinate label where label - # and val entries are not in step; the resulting cube has a val - # coord of bounded cells and a label coord of single string entries. - res_cube = self.cube.aggregated_by("label", self.mock_agg) - val_coord = AuxCoord( - np.array([1.0, 0.5, 1.0]), - bounds=np.array([[0, 2], [0, 1], [0, 2]]), - long_name="val", - ) - label_coord = AuxCoord( - np.array(["alpha", "beta", "gamma"]), - long_name="label", - units="no_unit", - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - - def test_agg_by_label_bounded(self): - # Aggregate a cube on a string coordinate label where label - # and val entries are not in step; the resulting cube has a val - # coord of bounded cells and a label coord of single string entries. - val_points = self.cube.coord("val").points - self.cube.coord("val").bounds = np.array( - [val_points - 0.5, val_points + 0.5] - ).T - res_cube = self.cube.aggregated_by("label", self.mock_agg) - val_coord = AuxCoord( - np.array([1.0, 0.5, 1.0]), - bounds=np.array([[-0.5, 2.5], [-0.5, 1.5], [-0.5, 2.5]]), - long_name="val", - ) - label_coord = AuxCoord( - np.array(["alpha", "beta", "gamma"]), - long_name="label", - units="no_unit", - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - - def test_2d_agg_by_label(self): - res_cube = self.cube.aggregated_by("label", self.mock_agg) - # For 2d coord, slices of aggregated coord should be the same as - # aggregated slices. - for res_slice, cube_slice in zip( - res_cube.slices("val"), self.cube.slices("val") - ): - cube_slice_agg = cube_slice.aggregated_by("label", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - - def test_agg_by_val(self): - # Aggregate a cube on a numeric coordinate val where label - # and val entries are not in step; the resulting cube has a label - # coord with serialised labels from the aggregated cells. - res_cube = self.cube.aggregated_by("val", self.mock_agg) - val_coord = AuxCoord(np.array([0, 1, 2]), long_name="val") - exp0 = "alpha|alpha|beta|alpha|alpha|gamma" - exp1 = "beta|alpha|beta" - exp2 = "gamma|alpha" - label_coord = AuxCoord( - np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - - def test_2d_agg_by_val(self): - res_cube = self.cube.aggregated_by("val", self.mock_agg) - # For 2d coord, slices of aggregated coord should be the same as - # aggregated slices. 
- for res_slice, cube_slice in zip( - res_cube.slices("val"), self.cube.slices("val") - ): - cube_slice_agg = cube_slice.aggregated_by("val", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - - def test_single_string_aggregation(self): - aux_coords = [ - (AuxCoord(["a", "b", "a"], long_name="foo"), 0), - (AuxCoord(["a", "a", "a"], long_name="bar"), 0), - ] - cube = iris.cube.Cube( - np.arange(12).reshape(3, 4), aux_coords_and_dims=aux_coords - ) - result = cube.aggregated_by("foo", MEAN) - self.assertEqual(result.shape, (2, 4)) - self.assertEqual( - result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") - ) - - def test_ancillary_variables_and_cell_measures_kept(self): - cube_agg = self.cube.aggregated_by("val", self.mock_agg) - self.assertEqual( - cube_agg.ancillary_variables(), [self.ancillary_variable] - ) - self.assertEqual(cube_agg.cell_measures(), [self.cell_measure]) - - def test_ancillary_variables_and_cell_measures_removed(self): - cube_agg = self.cube.aggregated_by("simple_agg", self.mock_agg) - self.assertEqual(cube_agg.ancillary_variables(), []) - self.assertEqual(cube_agg.cell_measures(), []) - - -class Test_aggregated_by__lazy(tests.IrisTest): - def setUp(self): - self.data = np.arange(44).reshape(4, 11) - self.lazydata = as_lazy_data(self.data) - self.cube = Cube(self.lazydata) - - val_coord = AuxCoord( - [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" - ) - label_coord = AuxCoord( - [ - "alpha", - "alpha", - "beta", - "beta", - "alpha", - "gamma", - "alpha", - "alpha", - "alpha", - "gamma", - "beta", - ], - long_name="label", - units="no_unit", - ) - simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg") - - self.label_mean = np.array( - [ - [4.0 + 1.0 / 3.0, 5.0, 7.0], - [15.0 + 1.0 / 3.0, 16.0, 18.0], - [26.0 + 1.0 / 3.0, 27.0, 29.0], - [37.0 + 1.0 / 3.0, 38.0, 40.0], - ] - ) - self.val_mean = np.array( - [ - [4.0 + 1.0 / 6.0, 5.0 + 2.0 / 3.0, 6.5], - [15.0 + 1.0 / 6.0, 16.0 + 2.0 / 3.0, 17.5], - [26.0 + 1.0 / 6.0, 27.0 + 2.0 / 3.0, 28.5], - [37.0 + 1.0 / 6.0, 38.0 + 2.0 / 3.0, 39.5], - ] - ) - - self.cube.add_aux_coord(simple_agg_coord, 0) - self.cube.add_aux_coord(val_coord, 1) - self.cube.add_aux_coord(label_coord, 1) - - def test_agg_by_label__lazy(self): - # Aggregate a cube on a string coordinate label where label - # and val entries are not in step; the resulting cube has a val - # coord of bounded cells and a label coord of single string entries. - res_cube = self.cube.aggregated_by("label", MEAN) - val_coord = AuxCoord( - np.array([1.0, 0.5, 1.0]), - bounds=np.array([[0, 2], [0, 1], [0, 2]]), - long_name="val", - ) - label_coord = AuxCoord( - np.array(["alpha", "beta", "gamma"]), - long_name="label", - units="no_unit", - ) - self.assertTrue(res_cube.has_lazy_data()) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - self.assertArrayEqual(res_cube.data, self.label_mean) - self.assertFalse(res_cube.has_lazy_data()) - - def test_agg_by_val__lazy(self): - # Aggregate a cube on a numeric coordinate val where label - # and val entries are not in step; the resulting cube has a label - # coord with serialised labels from the aggregated cells. 
- res_cube = self.cube.aggregated_by("val", MEAN) - val_coord = AuxCoord(np.array([0, 1, 2]), long_name="val") - exp0 = "alpha|alpha|beta|alpha|alpha|gamma" - exp1 = "beta|alpha|beta" - exp2 = "gamma|alpha" - label_coord = AuxCoord( - np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" - ) - self.assertTrue(res_cube.has_lazy_data()) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - self.assertArrayEqual(res_cube.data, self.val_mean) - self.assertFalse(res_cube.has_lazy_data()) - - def test_single_string_aggregation__lazy(self): - aux_coords = [ - (AuxCoord(["a", "b", "a"], long_name="foo"), 0), - (AuxCoord(["a", "a", "a"], long_name="bar"), 0), - ] - cube = iris.cube.Cube( - as_lazy_data(np.arange(12).reshape(3, 4)), - aux_coords_and_dims=aux_coords, - ) - means = np.array([[4.0, 5.0, 6.0, 7.0], [4.0, 5.0, 6.0, 7.0]]) - result = cube.aggregated_by("foo", MEAN) - self.assertTrue(result.has_lazy_data()) - self.assertEqual(result.shape, (2, 4)) - self.assertEqual( - result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") - ) - self.assertArrayEqual(result.data, means) - self.assertFalse(result.has_lazy_data()) - - class Test_rolling_window(tests.IrisTest): def setUp(self): self.cube = Cube(np.arange(6)) @@ -3163,5 +2937,65 @@ def test_cell_method_correct_order(self): self.assertTrue(cube1 == cube2) +class Test__dimensional_metadata: + @pytest.fixture + def cube(self): + return stock.simple_2d_w_cell_measure_ancil_var() + + def test_not_found(self, cube): + with pytest.raises(KeyError, match="was not found in"): + cube._dimensional_metadata("grid_latitude") + + def test_dim_coord_name_found(self, cube): + res = cube._dimensional_metadata("bar") + assert res == cube.coord("bar") + + def test_dim_coord_instance_found(self, cube): + res = cube._dimensional_metadata(cube.coord("bar")) + assert res == cube.coord("bar") + + def test_aux_coord_name_found(self, cube): + res = cube._dimensional_metadata("wibble") + assert res == cube.coord("wibble") + + def test_aux_coord_instance_found(self, cube): + res = cube._dimensional_metadata(cube.coord("wibble")) + assert res == cube.coord("wibble") + + def test_cell_measure_name_found(self, cube): + res = cube._dimensional_metadata("cell_area") + assert res == cube.cell_measure("cell_area") + + def test_cell_measure_instance_found(self, cube): + res = cube._dimensional_metadata(cube.cell_measure("cell_area")) + assert res == cube.cell_measure("cell_area") + + def test_ancillary_var_name_found(self, cube): + res = cube._dimensional_metadata("quality_flag") + assert res == cube.ancillary_variable("quality_flag") + + def test_ancillary_var_instance_found(self, cube): + res = cube._dimensional_metadata( + cube.ancillary_variable("quality_flag") + ) + assert res == cube.ancillary_variable("quality_flag") + + def test_two_with_same_name(self, cube): + # If a cube has two _DimensionalMetadata objects with the same name, the + # current behaviour results in _dimensional_metadata returning the first + # one it finds. + cube.cell_measure("cell_area").rename("wibble") + res = cube._dimensional_metadata("wibble") + assert res == cube.coord("wibble") + + def test_two_with_same_name_specify_instance(self, cube): + # The cube has two _DimensionalMetadata objects with the same name so + # we specify the _DimensionalMetadata instance to ensure it returns the + # correct one. 
+ cube.cell_measure("cell_area").rename("wibble") + res = cube._dimensional_metadata(cube.cell_measure("wibble")) + assert res == cube.cell_measure("wibble") + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index eb4c6c4f3f..1ebfe57773 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -10,6 +10,7 @@ import iris.tests as tests # isort:skip import collections +import copy from unittest import mock from cf_units import Unit @@ -23,11 +24,31 @@ from iris.fileformats.pp import STASH import iris.tests.stock +NOT_CUBE_MSG = "cannot be put in a cubelist, as it is not a Cube." +NON_ITERABLE_MSG = "object is not iterable" + + +class Test_append(tests.IrisTest): + def setUp(self): + self.cubelist = iris.cube.CubeList() + self.cube1 = iris.cube.Cube(1, long_name="foo") + self.cube2 = iris.cube.Cube(1, long_name="bar") + + def test_pass(self): + self.cubelist.append(self.cube1) + self.assertEqual(self.cubelist[-1], self.cube1) + self.cubelist.append(self.cube2) + self.assertEqual(self.cubelist[-1], self.cube2) + + def test_fail(self): + with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + self.cubelist.append(None) + class Test_concatenate_cube(tests.IrisTest): def setUp(self): self.units = Unit( - "days since 1970-01-01 00:00:00", calendar="gregorian" + "days since 1970-01-01 00:00:00", calendar="standard" ) self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") self.cube1.add_dim_coord( @@ -43,7 +64,7 @@ def test_pass(self): self.assertIsInstance(result, Cube) def test_fail(self): - units = Unit("days since 1970-01-02 00:00:00", calendar="gregorian") + units = Unit("days since 1970-01-02 00:00:00", calendar="standard") cube2 = Cube([1, 2, 3], "air_temperature", units="K") cube2.add_dim_coord(DimCoord([0, 1, 2], "time", units=units), 0) with self.assertRaises(iris.exceptions.ConcatenateError): @@ -70,6 +91,29 @@ def test_empty(self): CubeList([]).concatenate_cube() +class Test_extend(tests.IrisTest): + def setUp(self): + self.cube1 = iris.cube.Cube(1, long_name="foo") + self.cube2 = iris.cube.Cube(1, long_name="bar") + self.cubelist1 = iris.cube.CubeList([self.cube1]) + self.cubelist2 = iris.cube.CubeList([self.cube2]) + + def test_pass(self): + cubelist = copy.copy(self.cubelist1) + cubelist.extend(self.cubelist2) + self.assertEqual(cubelist, self.cubelist1 + self.cubelist2) + cubelist.extend([self.cube2]) + self.assertEqual(cubelist[-1], self.cube2) + + def test_fail(self): + with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + self.cubelist1.extend(self.cube1) + with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + self.cubelist1.extend(None) + with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + self.cubelist1.extend(range(3)) + + class Test_extract_overlapping(tests.IrisTest): def setUp(self): shape = (6, 14, 19) @@ -130,6 +174,44 @@ def test_different_orders(self): self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) +class Test_iadd(tests.IrisTest): + def setUp(self): + self.cube1 = iris.cube.Cube(1, long_name="foo") + self.cube2 = iris.cube.Cube(1, long_name="bar") + self.cubelist1 = iris.cube.CubeList([self.cube1]) + self.cubelist2 = iris.cube.CubeList([self.cube2]) + + def test_pass(self): + cubelist = copy.copy(self.cubelist1) + cubelist += self.cubelist2 + self.assertEqual(cubelist, self.cubelist1 + self.cubelist2) + cubelist += [self.cube2] + self.assertEqual(cubelist[-1], self.cube2) + + def 
test_fail(self): + with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + self.cubelist1 += self.cube1 + with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + self.cubelist1 += 1.0 + with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + self.cubelist1 += range(3) + + +class Test_insert(tests.IrisTest): + def setUp(self): + self.cube1 = iris.cube.Cube(1, long_name="foo") + self.cube2 = iris.cube.Cube(1, long_name="bar") + self.cubelist = iris.cube.CubeList([self.cube1] * 3) + + def test_pass(self): + self.cubelist.insert(1, self.cube2) + self.assertEqual(self.cubelist[1], self.cube2) + + def test_fail(self): + with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + self.cubelist.insert(0, None) + + class Test_merge_cube(tests.IrisTest): def setUp(self): self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") @@ -274,6 +356,34 @@ def test_combination_with_extra_triple(self): self.assertCML(cube, checksum=False) +class Test_setitem(tests.IrisTest): + def setUp(self): + self.cube1 = iris.cube.Cube(1, long_name="foo") + self.cube2 = iris.cube.Cube(1, long_name="bar") + self.cube3 = iris.cube.Cube(1, long_name="boo") + self.cubelist = iris.cube.CubeList([self.cube1] * 3) + + def test_pass(self): + self.cubelist[1] = self.cube2 + self.assertEqual(self.cubelist[1], self.cube2) + self.cubelist[:2] = (self.cube2, self.cube3) + self.assertEqual( + self.cubelist, + iris.cube.CubeList([self.cube2, self.cube3, self.cube1]), + ) + + def test_fail(self): + with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + self.cubelist[0] = None + with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + self.cubelist[0:2] = [self.cube3, None] + + with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + self.cubelist[:1] = 2.5 + with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + self.cubelist[:1] = self.cube1 + + class Test_xml(tests.IrisTest): def setUp(self): self.cubes = CubeList([Cube(np.arange(3)), Cube(np.arange(3))]) @@ -565,7 +675,7 @@ def setUp(self): self.scalar_cubes.append(Cube(i, long_name=letter)) def test_iterable(self): - self.assertTrue(isinstance(self.scalar_cubes, collections.Iterable)) + self.assertIsInstance(self.scalar_cubes, collections.abc.Iterable) def test_iteration(self): letters = "abcd" * 5 diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py new file mode 100644 index 0000000000..3230e3de00 --- /dev/null +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -0,0 +1,845 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.cube.Cube` class aggregated_by method.""" + +# import iris tests first so that some things can be initialised +# before importing anything else. 
+import iris.tests as tests # isort:skip + +from unittest import mock + +from cf_units import Unit +import numpy as np + +from iris._lazy_data import as_lazy_data +import iris.analysis +from iris.analysis import MEAN, SUM, Aggregator, WeightedAggregator +import iris.aux_factory +import iris.coords +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord +from iris.cube import Cube +import iris.exceptions + + +class Test_aggregated_by(tests.IrisTest): + def setUp(self): + self.cube = Cube(np.arange(44).reshape(4, 11)) + + val_coord = AuxCoord( + [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" + ) + label_coord = AuxCoord( + [ + "alpha", + "alpha", + "beta", + "beta", + "alpha", + "gamma", + "alpha", + "alpha", + "alpha", + "gamma", + "beta", + ], + long_name="label", + units="no_unit", + ) + simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg") + spanning_coord = AuxCoord( + np.arange(44).reshape(4, 11), long_name="spanning" + ) + spanning_label_coord = AuxCoord( + np.arange(1, 441, 10).reshape(4, 11).astype(str), + long_name="span_label", + units="no_unit", + ) + + self.cube.add_aux_coord(simple_agg_coord, 0) + self.cube.add_aux_coord(val_coord, 1) + self.cube.add_aux_coord(label_coord, 1) + self.cube.add_aux_coord(spanning_coord, (0, 1)) + self.cube.add_aux_coord(spanning_label_coord, (0, 1)) + + self.mock_agg = mock.Mock(spec=Aggregator) + self.mock_agg.cell_method = [] + self.mock_agg.aggregate = mock.Mock( + return_value=mock.Mock(dtype="object") + ) + self.mock_agg.aggregate_shape = mock.Mock(return_value=()) + self.mock_agg.lazy_func = None + self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y, z: x) + + self.mock_weighted_agg = mock.Mock(spec=WeightedAggregator) + self.mock_weighted_agg.cell_method = [] + + def mock_weighted_aggregate(*_, **kwargs): + if kwargs.get("returned", False): + return (mock.Mock(dtype="object"), mock.Mock(dtype="object")) + return mock.Mock(dtype="object") + + self.mock_weighted_agg.aggregate = mock.Mock( + side_effect=mock_weighted_aggregate + ) + self.mock_weighted_agg.aggregate_shape = mock.Mock(return_value=()) + self.mock_weighted_agg.lazy_func = None + self.mock_weighted_agg.post_process = mock.Mock( + side_effect=lambda x, y, z, **kwargs: y + ) + + self.ancillary_variable = AncillaryVariable( + [0, 1, 2, 3], long_name="foo" + ) + self.cube.add_ancillary_variable(self.ancillary_variable, 0) + self.cell_measure = CellMeasure([0, 1, 2, 3], long_name="bar") + self.cube.add_cell_measure(self.cell_measure, 0) + + self.simple_weights = np.array([1.0, 0.0, 2.0, 2.0]) + self.val_weights = np.ones_like(self.cube.data, dtype=np.float32) + + def test_2d_coord_simple_agg(self): + # For 2d coords, slices of aggregated coord should be the same as + # aggregated slices. + res_cube = self.cube.aggregated_by("simple_agg", self.mock_agg) + for res_slice, cube_slice in zip( + res_cube.slices("simple_agg"), self.cube.slices("simple_agg") + ): + cube_slice_agg = cube_slice.aggregated_by( + "simple_agg", self.mock_agg + ) + self.assertEqual( + res_slice.coord("spanning"), cube_slice_agg.coord("spanning") + ) + self.assertEqual( + res_slice.coord("span_label"), + cube_slice_agg.coord("span_label"), + ) + + def test_agg_by_label(self): + # Aggregate a cube on a string coordinate label where label + # and val entries are not in step; the resulting cube has a val + # coord of bounded cells and a label coord of single string entries. 
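+        # (The labels partition the 11 columns into groups of six "alpha",
+        # three "beta" and two "gamma" entries.)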
+ res_cube = self.cube.aggregated_by("label", self.mock_agg) + val_coord = AuxCoord( + np.array([1.0, 0.5, 1.0]), + bounds=np.array([[0, 2], [0, 1], [0, 2]]), + long_name="val", + ) + label_coord = AuxCoord( + np.array(["alpha", "beta", "gamma"]), + long_name="label", + units="no_unit", + ) + self.assertEqual(res_cube.coord("val"), val_coord) + self.assertEqual(res_cube.coord("label"), label_coord) + + def test_agg_by_label_bounded(self): + # Aggregate a cube on a string coordinate label where label + # and val entries are not in step; the resulting cube has a val + # coord of bounded cells and a label coord of single string entries. + val_points = self.cube.coord("val").points + self.cube.coord("val").bounds = np.array( + [val_points - 0.5, val_points + 0.5] + ).T + res_cube = self.cube.aggregated_by("label", self.mock_agg) + val_coord = AuxCoord( + np.array([1.0, 0.5, 1.0]), + bounds=np.array([[-0.5, 2.5], [-0.5, 1.5], [-0.5, 2.5]]), + long_name="val", + ) + label_coord = AuxCoord( + np.array(["alpha", "beta", "gamma"]), + long_name="label", + units="no_unit", + ) + self.assertEqual(res_cube.coord("val"), val_coord) + self.assertEqual(res_cube.coord("label"), label_coord) + + def test_2d_agg_by_label(self): + res_cube = self.cube.aggregated_by("label", self.mock_agg) + # For 2d coord, slices of aggregated coord should be the same as + # aggregated slices. + for res_slice, cube_slice in zip( + res_cube.slices("val"), self.cube.slices("val") + ): + cube_slice_agg = cube_slice.aggregated_by("label", self.mock_agg) + self.assertEqual( + res_slice.coord("spanning"), cube_slice_agg.coord("spanning") + ) + + def test_agg_by_val(self): + # Aggregate a cube on a numeric coordinate val where label + # and val entries are not in step; the resulting cube has a label + # coord with serialised labels from the aggregated cells. + res_cube = self.cube.aggregated_by("val", self.mock_agg) + val_coord = AuxCoord(np.array([0, 1, 2]), long_name="val") + exp0 = "alpha|alpha|beta|alpha|alpha|gamma" + exp1 = "beta|alpha|beta" + exp2 = "gamma|alpha" + label_coord = AuxCoord( + np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" + ) + self.assertEqual(res_cube.coord("val"), val_coord) + self.assertEqual(res_cube.coord("label"), label_coord) + + def test_2d_agg_by_val(self): + res_cube = self.cube.aggregated_by("val", self.mock_agg) + # For 2d coord, slices of aggregated coord should be the same as + # aggregated slices. 
+ for res_slice, cube_slice in zip( + res_cube.slices("val"), self.cube.slices("val") + ): + cube_slice_agg = cube_slice.aggregated_by("val", self.mock_agg) + self.assertEqual( + res_slice.coord("spanning"), cube_slice_agg.coord("spanning") + ) + + def test_single_string_aggregation(self): + aux_coords = [ + (AuxCoord(["a", "b", "a"], long_name="foo"), 0), + (AuxCoord(["a", "a", "a"], long_name="bar"), 0), + ] + cube = iris.cube.Cube( + np.arange(12).reshape(3, 4), aux_coords_and_dims=aux_coords + ) + result = cube.aggregated_by("foo", MEAN) + self.assertEqual(result.shape, (2, 4)) + self.assertEqual( + result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") + ) + + def test_ancillary_variables_and_cell_measures_kept(self): + cube_agg = self.cube.aggregated_by("val", self.mock_agg) + self.assertEqual( + cube_agg.ancillary_variables(), [self.ancillary_variable] + ) + self.assertEqual(cube_agg.cell_measures(), [self.cell_measure]) + + def test_ancillary_variables_and_cell_measures_removed(self): + cube_agg = self.cube.aggregated_by("simple_agg", self.mock_agg) + self.assertEqual(cube_agg.ancillary_variables(), []) + self.assertEqual(cube_agg.cell_measures(), []) + + def test_1d_weights(self): + self.cube.aggregated_by( + "simple_agg", self.mock_weighted_agg, weights=self.simple_weights + ) + + self.assertEqual(self.mock_weighted_agg.aggregate.call_count, 2) + + # A simple mock.assert_called_with does not work due to ValueError: The + # truth value of an array with more than one element is ambiguous. Use + # a.any() or a.all() + call_1 = self.mock_weighted_agg.aggregate.mock_calls[0] + np.testing.assert_array_equal( + call_1.args[0], + np.array( + [ + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + [11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21], + ] + ), + ) + self.assertEqual(call_1.kwargs["axis"], 0) + np.testing.assert_array_almost_equal( + call_1.kwargs["weights"], + np.array( + [ + [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ] + ), + ) + + call_2 = self.mock_weighted_agg.aggregate.mock_calls[1] + np.testing.assert_array_equal( + call_2.args[0], + np.array( + [ + [22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32], + [33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43], + ] + ), + ) + self.assertEqual(call_2.kwargs["axis"], 0) + np.testing.assert_array_almost_equal( + call_2.kwargs["weights"], + np.array( + [ + [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0], + [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0], + ] + ), + ) + + def test_2d_weights(self): + self.cube.aggregated_by( + "val", self.mock_weighted_agg, weights=self.val_weights + ) + + self.assertEqual(self.mock_weighted_agg.aggregate.call_count, 3) + + # A simple mock.assert_called_with does not work due to ValueError: The + # truth value of an array with more than one element is ambiguous. 
Use + # a.any() or a.all() + call_1 = self.mock_weighted_agg.aggregate.mock_calls[0] + np.testing.assert_array_equal( + call_1.args[0], + np.array( + [ + [0, 1, 2, 6, 7, 9], + [11, 12, 13, 17, 18, 20], + [22, 23, 24, 28, 29, 31], + [33, 34, 35, 39, 40, 42], + ] + ), + ) + self.assertEqual(call_1.kwargs["axis"], 1) + np.testing.assert_array_almost_equal( + call_1.kwargs["weights"], np.ones((4, 6)) + ) + + call_2 = self.mock_weighted_agg.aggregate.mock_calls[1] + np.testing.assert_array_equal( + call_2.args[0], + np.array([[3, 4, 10], [14, 15, 21], [25, 26, 32], [36, 37, 43]]), + ) + self.assertEqual(call_2.kwargs["axis"], 1) + np.testing.assert_array_almost_equal( + call_2.kwargs["weights"], np.ones((4, 3)) + ) + + call_3 = self.mock_weighted_agg.aggregate.mock_calls[2] + np.testing.assert_array_equal( + call_3.args[0], np.array([[5, 8], [16, 19], [27, 30], [38, 41]]) + ) + self.assertEqual(call_3.kwargs["axis"], 1) + np.testing.assert_array_almost_equal( + call_3.kwargs["weights"], np.ones((4, 2)) + ) + + def test_returned(self): + output = self.cube.aggregated_by( + "simple_agg", self.mock_weighted_agg, returned=True + ) + + self.assertTrue(isinstance(output, tuple)) + self.assertEqual(len(output), 2) + self.assertEqual(output[0].shape, (2, 11)) + self.assertEqual(output[1].shape, (2, 11)) + + def test_fail_1d_weights_wrong_len(self): + wrong_weights = np.array([1.0, 2.0]) + msg = ( + r"1D weights must have the same length as the dimension that is " + r"aggregated, got 2, expected 11" + ) + with self.assertRaisesRegex(ValueError, msg): + self.cube.aggregated_by( + "val", self.mock_weighted_agg, weights=wrong_weights + ) + + def test_fail_weights_wrong_shape(self): + wrong_weights = np.ones((42, 1)) + msg = ( + r"Weights must either be 1D or have the same shape as the cube, " + r"got shape \(42, 1\) for weights, \(4, 11\) for cube" + ) + with self.assertRaisesRegex(ValueError, msg): + self.cube.aggregated_by( + "val", self.mock_weighted_agg, weights=wrong_weights + ) + + +class Test_aggregated_by__lazy(tests.IrisTest): + def setUp(self): + self.data = np.arange(44).reshape(4, 11) + self.lazydata = as_lazy_data(self.data) + self.cube = Cube(self.lazydata) + + val_coord = AuxCoord( + [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" + ) + label_coord = AuxCoord( + [ + "alpha", + "alpha", + "beta", + "beta", + "alpha", + "gamma", + "alpha", + "alpha", + "alpha", + "gamma", + "beta", + ], + long_name="label", + units="no_unit", + ) + simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg") + + self.label_mean = np.array( + [ + [4.0 + 1.0 / 3.0, 5.0, 7.0], + [15.0 + 1.0 / 3.0, 16.0, 18.0], + [26.0 + 1.0 / 3.0, 27.0, 29.0], + [37.0 + 1.0 / 3.0, 38.0, 40.0], + ] + ) + self.val_mean = np.array( + [ + [4.0 + 1.0 / 6.0, 5.0 + 2.0 / 3.0, 6.5], + [15.0 + 1.0 / 6.0, 16.0 + 2.0 / 3.0, 17.5], + [26.0 + 1.0 / 6.0, 27.0 + 2.0 / 3.0, 28.5], + [37.0 + 1.0 / 6.0, 38.0 + 2.0 / 3.0, 39.5], + ] + ) + + self.cube.add_aux_coord(simple_agg_coord, 0) + self.cube.add_aux_coord(val_coord, 1) + self.cube.add_aux_coord(label_coord, 1) + + self.simple_weights = np.array([1.0, 0.0, 2.0, 2.0]) + self.val_weights = 2.0 * np.ones(self.cube.shape, dtype=np.float32) + + def test_agg_by_label__lazy(self): + # Aggregate a cube on a string coordinate label where label + # and val entries are not in step; the resulting cube has a val + # coord of bounded cells and a label coord of single string entries. 
+ res_cube = self.cube.aggregated_by("label", MEAN) + val_coord = AuxCoord( + np.array([1.0, 0.5, 1.0]), + bounds=np.array([[0, 2], [0, 1], [0, 2]]), + long_name="val", + ) + label_coord = AuxCoord( + np.array(["alpha", "beta", "gamma"]), + long_name="label", + units="no_unit", + ) + self.assertTrue(res_cube.has_lazy_data()) + self.assertEqual(res_cube.coord("val"), val_coord) + self.assertEqual(res_cube.coord("label"), label_coord) + self.assertArrayEqual(res_cube.data, self.label_mean) + self.assertFalse(res_cube.has_lazy_data()) + + def test_agg_by_val__lazy(self): + # Aggregate a cube on a numeric coordinate val where label + # and val entries are not in step; the resulting cube has a label + # coord with serialised labels from the aggregated cells. + res_cube = self.cube.aggregated_by("val", MEAN) + val_coord = AuxCoord(np.array([0, 1, 2]), long_name="val") + exp0 = "alpha|alpha|beta|alpha|alpha|gamma" + exp1 = "beta|alpha|beta" + exp2 = "gamma|alpha" + label_coord = AuxCoord( + np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" + ) + self.assertTrue(res_cube.has_lazy_data()) + self.assertEqual(res_cube.coord("val"), val_coord) + self.assertEqual(res_cube.coord("label"), label_coord) + self.assertArrayEqual(res_cube.data, self.val_mean) + self.assertFalse(res_cube.has_lazy_data()) + + def test_single_string_aggregation__lazy(self): + aux_coords = [ + (AuxCoord(["a", "b", "a"], long_name="foo"), 0), + (AuxCoord(["a", "a", "a"], long_name="bar"), 0), + ] + cube = iris.cube.Cube( + as_lazy_data(np.arange(12).reshape(3, 4)), + aux_coords_and_dims=aux_coords, + ) + means = np.array([[4.0, 5.0, 6.0, 7.0], [4.0, 5.0, 6.0, 7.0]]) + result = cube.aggregated_by("foo", MEAN) + self.assertTrue(result.has_lazy_data()) + self.assertEqual(result.shape, (2, 4)) + self.assertEqual( + result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") + ) + self.assertArrayEqual(result.data, means) + self.assertFalse(result.has_lazy_data()) + + def test_1d_weights__lazy(self): + self.assertTrue(self.cube.has_lazy_data()) + + cube_agg = self.cube.aggregated_by( + "simple_agg", SUM, weights=self.simple_weights + ) + + self.assertTrue(self.cube.has_lazy_data()) + self.assertTrue(cube_agg.has_lazy_data()) + self.assertEqual(cube_agg.shape, (2, 11)) + + row_0 = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0] + row_1 = [ + 110.0, + 114.0, + 118.0, + 122.0, + 126.0, + 130.0, + 134.0, + 138.0, + 142.0, + 146.0, + 150.0, + ] + np.testing.assert_array_almost_equal( + cube_agg.data, np.array([row_0, row_1]) + ) + + def test_2d_weights__lazy(self): + self.assertTrue(self.cube.has_lazy_data()) + + cube_agg = self.cube.aggregated_by( + "val", SUM, weights=self.val_weights + ) + + self.assertTrue(self.cube.has_lazy_data()) + self.assertTrue(cube_agg.has_lazy_data()) + + self.assertEqual(cube_agg.shape, (4, 3)) + np.testing.assert_array_almost_equal( + cube_agg.data, + np.array( + [ + [50.0, 34.0, 26.0], + [182.0, 100.0, 70.0], + [314.0, 166.0, 114.0], + [446.0, 232.0, 158.0], + ] + ), + ) + + def test_returned__lazy(self): + self.assertTrue(self.cube.has_lazy_data()) + + output = self.cube.aggregated_by( + "simple_agg", SUM, weights=self.simple_weights, returned=True + ) + + self.assertTrue(self.cube.has_lazy_data()) + + self.assertTrue(isinstance(output, tuple)) + self.assertEqual(len(output), 2) + + cube = output[0] + self.assertTrue(isinstance(cube, Cube)) + self.assertTrue(cube.has_lazy_data()) + self.assertEqual(cube.shape, (2, 11)) + row_0 = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 
10.0] + row_1 = [ + 110.0, + 114.0, + 118.0, + 122.0, + 126.0, + 130.0, + 134.0, + 138.0, + 142.0, + 146.0, + 150.0, + ] + np.testing.assert_array_almost_equal( + cube.data, np.array([row_0, row_1]) + ) + + weights = output[1] + self.assertEqual(weights.shape, (2, 11)) + np.testing.assert_array_almost_equal( + weights, + np.array( + [ + [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], + [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0], + ] + ), + ) + + +class Test_aggregated_by__climatology(tests.IrisTest): + def setUp(self): + self.data = np.arange(100).reshape(20, 5) + self.aggregator = iris.analysis.MEAN + + def get_result( + self, + transpose: bool = False, + second_categorised: bool = False, + bounds: bool = False, + partially_aligned: bool = False, + partially_aligned_timelike: bool = False, + invalid_units: bool = False, + already_climatological: bool = False, + climatological_op: bool = True, + ) -> Cube: + cube_data = self.data + if transpose: + cube_data = cube_data.T + axes = [1, 0] + else: + axes = [0, 1] + if not invalid_units: + units = Unit("days since 1970-01-01") + else: + units = Unit("m") + if partially_aligned_timelike: + pa_units = Unit("days since 1970-01-01") + else: + pa_units = Unit("m") + + # DimCoords + aligned_coord = DimCoord( + np.arange(20), + long_name="aligned", + units=units, + ) + orthogonal_coord = DimCoord(np.arange(5), long_name="orth") + + if bounds: + aligned_coord.guess_bounds() + + aligned_coord.climatological = already_climatological + + dim_coords_and_dims = zip([aligned_coord, orthogonal_coord], axes) + + # AuxCoords + categorised_coord1 = AuxCoord( + np.tile([0, 1], 10), long_name="cat1", units=Unit("month") + ) + + if second_categorised: + categorised_coord2 = AuxCoord( + np.tile([0, 1, 2, 3, 4], 4), long_name="cat2" + ) + categorised_coords = [categorised_coord1, categorised_coord2] + else: + categorised_coords = categorised_coord1 + + aux_coords_and_dims = [ + (categorised_coord1, axes[0]), + ] + + if second_categorised: + aux_coords_and_dims.append((categorised_coord2, axes[0])) + + if partially_aligned: + partially_aligned_coord = AuxCoord( + cube_data + 1, + long_name="part_aligned", + units=pa_units, + ) + aux_coords_and_dims.append((partially_aligned_coord, (0, 1))) + + # Build cube + in_cube = iris.cube.Cube( + cube_data, + long_name="wibble", + dim_coords_and_dims=dim_coords_and_dims, + aux_coords_and_dims=aux_coords_and_dims, + ) + + out_cube = in_cube.aggregated_by( + categorised_coords, + self.aggregator, + climatological=climatological_op, + ) + + return out_cube + + def test_basic(self): + """ + Check the least complicated version works (set climatological, set + points correctly). + """ + result = self.get_result() + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(2)) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + self.assertTrue(aligned_coord.climatological) + self.assertIn(aligned_coord, result.dim_coords) + + categorised_coord = result.coord("cat1") + self.assertArrayEqual(categorised_coord.points, np.arange(2)) + self.assertIsNone(categorised_coord.bounds) + self.assertFalse(categorised_coord.climatological) + + def test_2d_other_coord(self): + """ + Check that we can handle aggregation applying to a 2d AuxCoord that + covers the aggregation dimension and another one. 
+ """ + result = self.get_result(partially_aligned=True) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(2)) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + self.assertTrue(aligned_coord.climatological) + + part_aligned_coord = result.coord("part_aligned") + self.assertArrayEqual( + part_aligned_coord.points, np.arange(46, 56).reshape(2, 5) + ) + self.assertArrayEqual( + part_aligned_coord.bounds, + np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape( + 2, 5, 2 + ), + ) + self.assertFalse(part_aligned_coord.climatological) + + def test_2d_timelike_other_coord(self): + """ + Check that we can handle aggregation applying to a 2d AuxCoord that + covers the aggregation dimension and another one. + """ + result = self.get_result( + partially_aligned=True, partially_aligned_timelike=True + ) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(2)) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + self.assertTrue(aligned_coord.climatological) + + part_aligned_coord = result.coord("part_aligned") + self.assertArrayEqual( + part_aligned_coord.points, np.arange(1, 11).reshape(2, 5) + ) + self.assertArrayEqual( + part_aligned_coord.bounds, + np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape( + 2, 5, 2 + ), + ) + self.assertTrue(part_aligned_coord.climatological) + + def test_transposed(self): + """ + Check that we can handle the axis of aggregation being a different one. + """ + result = self.get_result(transpose=True) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(2)) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + self.assertTrue(aligned_coord.climatological) + + categorised_coord = result.coord("cat1") + self.assertArrayEqual(categorised_coord.points, np.arange(2)) + self.assertIsNone(categorised_coord.bounds) + self.assertFalse(categorised_coord.climatological) + + def test_bounded(self): + """Check that we handle bounds correctly.""" + result = self.get_result(bounds=True) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, [-0.5, 0.5]) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[-0.5, 18.5], [0.5, 19.5]]) + ) + self.assertTrue(aligned_coord.climatological) + + def test_multiple_agg_coords(self): + """ + Check that we can aggregate on multiple coords on the same axis. + """ + result = self.get_result(second_categorised=True) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(10)) + self.assertArrayEqual( + aligned_coord.bounds, + np.array([np.arange(10), np.arange(10, 20)]).T, + ) + self.assertTrue(aligned_coord.climatological) + + categorised_coord1 = result.coord("cat1") + self.assertArrayEqual( + categorised_coord1.points, np.tile(np.arange(2), 5) + ) + self.assertIsNone(categorised_coord1.bounds) + self.assertFalse(categorised_coord1.climatological) + + categorised_coord2 = result.coord("cat2") + self.assertArrayEqual( + categorised_coord2.points, np.tile(np.arange(5), 2) + ) + self.assertIsNone(categorised_coord2.bounds) + self.assertFalse(categorised_coord2.climatological) + + def test_non_climatological_units(self): + """ + Check that the failure to set the climatological flag on an incompatible + unit is handled quietly. 
+ """ + result = self.get_result(invalid_units=True) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(9, 11)) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + self.assertFalse(aligned_coord.climatological) + + def test_clim_in_clim_op(self): + """ + Check the least complicated version works (set climatological, set + points correctly). For the input coordinate to be climatological, it + must have bounds + """ + result = self.get_result(bounds=True, already_climatological=True) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, [-0.5, 0.5]) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[-0.5, 18.5], [0.5, 19.5]]) + ) + self.assertTrue(aligned_coord.climatological) + + categorised_coord = result.coord("cat1") + self.assertArrayEqual(categorised_coord.points, np.arange(2)) + self.assertIsNone(categorised_coord.bounds) + self.assertFalse(categorised_coord.climatological) + + def test_clim_in_no_clim_op(self): + """ + Check the least complicated version works (set climatological, set + points correctly). For the input coordinate to be climatological, it + must have bounds. + """ + result = self.get_result( + bounds=True, already_climatological=True, climatological_op=False + ) + + aligned_coord = result.coord("aligned") + self.assertArrayEqual(aligned_coord.points, np.arange(9, 11)) + self.assertArrayEqual( + aligned_coord.bounds, np.array([[-0.5, 18.5], [0.5, 19.5]]) + ) + self.assertTrue(aligned_coord.climatological) + + categorised_coord = result.coord("cat1") + self.assertArrayEqual(categorised_coord.points, np.arange(2)) + self.assertIsNone(categorised_coord.bounds) + self.assertFalse(categorised_coord.climatological) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py index eab3e7942d..e6b1425110 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py @@ -28,10 +28,10 @@ def setUp(self): def test_cube_attributes(self): self.assertEqual(id(self.cube), self.representer.cube_id) - self.assertStringEqual(str(self.cube), self.representer.cube_str) + self.assertMultiLineEqual(str(self.cube), self.representer.cube_str) def test__heading_contents(self): - content = set(self.representer.str_headings.values()) + content = set(self.representer.sections_data.values()) self.assertEqual(len(content), 1) self.assertIsNone(list(content)[0]) @@ -131,21 +131,21 @@ def setUp(self): def test_population(self): nonmesh_values = [ value - for key, value in self.representer.str_headings.items() + for key, value in self.representer.sections_data.items() if "Mesh" not in key ] for v in nonmesh_values: self.assertIsNotNone(v) def test_headings__dimcoords(self): - contents = self.representer.str_headings["Dimension coordinates:"] + contents = self.representer.sections_data["Dimension coordinates:"] content_str = ",".join(content for content in contents) dim_coords = [c.name() for c in self.cube.dim_coords] for coord in dim_coords: self.assertIn(coord, content_str) def test_headings__auxcoords(self): - contents = self.representer.str_headings["Auxiliary coordinates:"] + contents = self.representer.sections_data["Auxiliary coordinates:"] content_str = ",".join(content for content in contents) aux_coords = [ 
c.name() for c in self.cube.aux_coords if c.shape != (1,) @@ -154,14 +154,14 @@ def test_headings__auxcoords(self): self.assertIn(coord, content_str) def test_headings__derivedcoords(self): - contents = self.representer.str_headings["Derived coordinates:"] + contents = self.representer.sections_data["Derived coordinates:"] content_str = ",".join(content for content in contents) derived_coords = [c.name() for c in self.cube.derived_coords] for coord in derived_coords: self.assertIn(coord, content_str) def test_headings__cellmeasures(self): - contents = self.representer.str_headings["Cell measures:"] + contents = self.representer.sections_data["Cell measures:"] content_str = ",".join(content for content in contents) cell_measures = [ c.name() for c in self.cube.cell_measures() if c.shape != (1,) @@ -170,7 +170,7 @@ def test_headings__cellmeasures(self): self.assertIn(coord, content_str) def test_headings__ancillaryvars(self): - contents = self.representer.str_headings["Ancillary variables:"] + contents = self.representer.sections_data["Ancillary variables:"] content_str = ",".join(content for content in contents) ancillary_variables = [ c.name() for c in self.cube.ancillary_variables() @@ -179,7 +179,7 @@ def test_headings__ancillaryvars(self): self.assertIn(coord, content_str) def test_headings__scalarcellmeasures(self): - contents = self.representer.str_headings["Scalar cell measures:"] + contents = self.representer.sections_data["Scalar cell measures:"] content_str = ",".join(content for content in contents) scalar_cell_measures = [ c.name() for c in self.cube.cell_measures() if c.shape == (1,) @@ -188,7 +188,7 @@ def test_headings__scalarcellmeasures(self): self.assertIn(coord, content_str) def test_headings__scalarcoords(self): - contents = self.representer.str_headings["Scalar coordinates:"] + contents = self.representer.sections_data["Scalar coordinates:"] content_str = ",".join(content for content in contents) scalar_coords = [ c.name() for c in self.cube.coords() if c.shape == (1,) @@ -197,14 +197,14 @@ def test_headings__scalarcoords(self): self.assertIn(coord, content_str) def test_headings__attributes(self): - contents = self.representer.str_headings["Attributes:"] + contents = self.representer.sections_data["Attributes:"] content_str = ",".join(content for content in contents) for attr_name, attr_value in self.cube.attributes.items(): self.assertIn(attr_name, content_str) self.assertIn(attr_value, content_str) def test_headings__cellmethods(self): - contents = self.representer.str_headings["Cell methods:"] + contents = self.representer.sections_data["Cell methods:"] content_str = ",".join(content for content in contents) for method in self.cube.cell_methods: name = method.method @@ -328,22 +328,6 @@ def test__attribute_row(self): self.assertIn(colspan_str, row_str) -@tests.skip_data -class Test__expand_last_cell(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - self.representer = CubeRepresentation(self.cube) - self.representer._get_bits(self.representer._get_lines()) - col_span = self.representer.ndims - self.row = self.representer._make_row( - "title", body="first", col_span=col_span - ) - - def test_add_line(self): - cell = self.representer._expand_last_cell(self.row[-2], "second") - self.assertIn("first
second", cell) - - @tests.skip_data class Test__make_content(tests.IrisTest): def setUp(self): @@ -372,15 +356,21 @@ def test_included(self): def test_not_included(self): # `stock.simple_3d()` only contains the `Dimension coordinates` attr. - not_included = list(self.representer.str_headings.keys()) + not_included = list(self.representer.sections_data.keys()) not_included.pop(not_included.index("Dimension coordinates:")) for heading in not_included: self.assertNotIn(heading, self.result) def test_mesh_included(self): # self.mesh_cube contains a `Mesh coordinates` section. - included = "Mesh coordinates" - self.assertIn(included, self.mesh_result) + self.assertIn( + 'Mesh coordinates', + self.mesh_result, + ) + # and a `Mesh:` section. + self.assertIn( + 'Mesh', self.mesh_result + ) mesh_coord_names = [ c.name() for c in self.mesh_cube.coords(mesh_coords=True) ] @@ -389,11 +379,55 @@ def test_mesh_included(self): def test_mesh_not_included(self): # self.mesh_cube _only_ contains a `Mesh coordinates` section. - not_included = list(self.representer.str_headings.keys()) + not_included = list(self.representer.sections_data.keys()) not_included.pop(not_included.index("Mesh coordinates:")) for heading in not_included: self.assertNotIn(heading, self.result) + def test_mesh_result(self): + # A plain snapshot of a simple meshcube case. + self.assertString(self.mesh_result) + + +class Test__make_content__string_attrs(tests.IrisTest): + # Check how we handle "multi-line" string attributes. + # NOTE: before the adoption of iris._representation.CubeSummary, these + # used to appear as extra items in sections_data, identifiable by + # their not containing a ":", and which required to be combined into a + # single cell. + # This case no longer occurs. For now, just snapshot some current + # 'correct' behaviours, for change security and any future refactoring. + + @staticmethod + def _cube_stringattribute_html(name, attr): + cube = Cube([0]) + cube.attributes[name] = attr + representer = CubeRepresentation(cube) + representer._get_bits(representer._get_lines()) + result = representer._make_content() + return result + + def test_simple_string_attribute(self): + html = self._cube_stringattribute_html( + "single-string", "single string" + ) + self.assertString(html) + + def test_long_string_attribute(self): + attr = "long string.. " * 20 + html = self._cube_stringattribute_html("long-string", attr) + self.assertString(html) + + def test_embedded_newlines_string_attribute(self): + attr = "string\nwith\nnewlines" + html = self._cube_stringattribute_html("newlines-string", attr) + self.assertString(html) + + def test_multi_string_attribute(self): + attr = ["vector", "of", "strings"] + html = self._cube_stringattribute_html("multi-string", attr) + self.assertString(html) + @tests.skip_data class Test_repr_html(tests.IrisTest): diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py index 8f029c1b7b..e44aee730a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py @@ -16,8 +16,6 @@ from unittest import mock -import numpy as np - from iris.experimental.ugrid.cf import ( CFUGridAuxiliaryCoordinateVariable, CFUGridConnectivityVariable, @@ -56,17 +54,17 @@ class Test_build_cf_groups(tests.IrisTest): @classmethod def setUpClass(cls): # Replicating syntax from test_CFReader.Test_build_cf_groups__formula_terms. 
- cls.mesh = netcdf_ugrid_variable("mesh", "", np.int) - cls.node_x = netcdf_ugrid_variable("node_x", "node", np.float) - cls.node_y = netcdf_ugrid_variable("node_y", "node", np.float) - cls.face_x = netcdf_ugrid_variable("face_x", "face", np.float) - cls.face_y = netcdf_ugrid_variable("face_y", "face", np.float) + cls.mesh = netcdf_ugrid_variable("mesh", "", int) + cls.node_x = netcdf_ugrid_variable("node_x", "node", float) + cls.node_y = netcdf_ugrid_variable("node_y", "node", float) + cls.face_x = netcdf_ugrid_variable("face_x", "face", float) + cls.face_y = netcdf_ugrid_variable("face_y", "face", float) cls.face_nodes = netcdf_ugrid_variable( - "face_nodes", "face vertex", np.int + "face_nodes", "face vertex", int ) - cls.levels = netcdf_ugrid_variable("levels", "levels", np.int) + cls.levels = netcdf_ugrid_variable("levels", "levels", int) cls.data = netcdf_ugrid_variable( - "data", "levels face", np.float, coordinates="face_x face_y" + "data", "levels face", float, coordinates="face_x face_y" ) # Add necessary attributes for mesh recognition. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index f2175ef99a..310e68248a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -13,7 +13,6 @@ from pathlib import Path from shutil import rmtree -from subprocess import check_call import tempfile from uuid import uuid4 @@ -22,6 +21,7 @@ load_meshes, logger, ) +from iris.tests.stock.netcdf import ncgen_from_cdl def setUpModule(): @@ -35,15 +35,11 @@ def tearDownModule(): def cdl_to_nc(cdl): - cdl_path = TMP_DIR / "tst.cdl" - nc_path = TMP_DIR / f"{uuid4()}.nc" - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl) + cdl_path = str(TMP_DIR / "tst.cdl") + nc_path = str(TMP_DIR / f"{uuid4()}.nc") # Use ncgen to convert this into an actual (temporary) netCDF file. 
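    # (ncgen_from_cdl is assumed to do what the removed lines below did
    # inline: write the CDL string to cdl_path, then run
    # "ncgen -o <nc_path> <cdl_path>" to produce the netCDF file.)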
- command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - return str(nc_path) + ncgen_from_cdl(cdl_str=cdl, cdl_path=cdl_path, nc_path=nc_path) + return nc_path class TestsBasic(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 70173bb2ac..dee28e98cc 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -83,13 +83,13 @@ def setUp(self): "delta", "height", np.float64, bounds="delta_bnds" ) self.delta_bnds = netcdf_variable( - "delta_bnds", "height bnds", np.float + "delta_bnds", "height bnds", np.float64 ) self.sigma = netcdf_variable( "sigma", "height", np.float64, bounds="sigma_bnds" ) self.sigma_bnds = netcdf_variable( - "sigma_bnds", "height bnds", np.float + "sigma_bnds", "height bnds", np.float64 ) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" @@ -185,13 +185,13 @@ def setUp(self): "delta", "height", np.float64, bounds="delta_bnds" ) self.delta_bnds = netcdf_variable( - "delta_bnds", "height bnds", np.float + "delta_bnds", "height bnds", np.float64 ) self.sigma = netcdf_variable( "sigma", "height", np.float64, bounds="sigma_bnds" ) self.sigma_bnds = netcdf_variable( - "sigma_bnds", "height bnds", np.float + "sigma_bnds", "height bnds", np.float64 ) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 0d3ed932e8..c18bdb8399 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -9,13 +9,14 @@ """ from pathlib import Path import shutil -import subprocess import tempfile +import warnings import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube +from iris.tests.stock.netcdf import ncgen_from_cdl """ Notes on testing method. @@ -29,9 +30,7 @@ As it's hard to construct a suitable CFReader from scratch, it would seem simpler (for now) to use an ACTUAL FILE. Likewise, the easiest approach to that is with CDL and "ncgen". -To do this, we need a test "fixture" that can create suitable test files in a -temporary directory. - +For this, we just use 'tests.stock.netcdf.ncgen_from_cdl'. """ @@ -77,12 +76,7 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): """ # Write the CDL to a file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl_string) - - # Create a netCDF file from the CDL file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - subprocess.check_call(command, shell=True) + ncgen_from_cdl(cdl_string, cdl_path, nc_path) # Simulate the inner part of the file reading process. cf = CFReader(nc_path) @@ -95,10 +89,19 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # Use 'patch' so it is restored after the test. self.patch("iris.fileformats.netcdf.DEBUG", self.debug) - # Call the main translation function to load a single cube. - # _load_cube establishes per-cube facts, activates rules and - # produces an actual cube. 
- cube = _load_cube(engine, cf, cf_var, nc_path) + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="Ignoring a datum in netCDF load for consistency with existing " + "behaviour. In a future version of Iris, this datum will be " + "applied. To apply the datum when loading, use the " + "iris.FUTURE.datum_support flag.", + category=FutureWarning, + ) + # Call the main translation function to load a single cube. + # _load_cube establishes per-cube facts, activates rules and + # produces an actual cube. + cube = _load_cube(engine, cf, cf_var, nc_path) # Also Record, on the cubes, which hybrid coord elements were identified # by the rules operation. @@ -114,7 +117,7 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # Always returns a single cube. return cube - def run_testcase(self, warning=None, **testcase_kwargs): + def run_testcase(self, warning_regex=None, **testcase_kwargs): """ Run a testcase with chosen options, returning a test cube. @@ -130,10 +133,10 @@ def run_testcase(self, warning=None, **testcase_kwargs): print(cdl_string) print("------\n") - if warning is None: + if warning_regex is None: context = self.assertNoWarningsRegexp() else: - context = self.assertWarnsRegexp(warning) + context = self.assertWarnsRegex(UserWarning, warning_regex) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index a2ecdf1490..a367e7709c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -144,7 +144,6 @@ def _make_testcase_cdl( # Add a specified scale-factor, if requested. if mapping_scalefactor is not None: # Add a specific scale-factor term to the grid mapping. - # (Non-unity scale is not supported for Mercator/Stereographic). sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN g_string += f""" {g_varname}:{sfapo_name} = {mapping_scalefactor} ; @@ -197,6 +196,22 @@ def _make_testcase_cdl( g_string += f""" {g_varname}:{saa_name} = "y" ; """ + # Polar stereo needs a special 'latitude of projection origin', a + # 'straight_vertical_longitude_from_pole' and a `standard_parallel` or + # `scale_factor_at_projection_origin` so treat it specially + if mapping_type_name in (hh.CF_GRID_MAPPING_POLAR,): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 90.0 ; + """ + svl_name = hh.CF_ATTR_GRID_STRAIGHT_VERT_LON + g_string += f""" + {g_varname}:{svl_name} = 0.0 ; + """ + stanpar_name = hh.CF_ATTR_GRID_STANDARD_PARALLEL + g_string += f""" + {g_varname}:{stanpar_name} = 1.0 ; + """ # y-coord values if yco_values is None: @@ -392,7 +407,9 @@ def test_latlon_bad_gridmapping_varname(self): # Notes: # * behaviours all the same as 'test_bad_gridmapping_nameproperty' warning = "Missing.*grid mapping variable 'grid'" - result = self.run_testcase(warning=warning, gridmapvar_name="grid_2") + result = self.run_testcase( + warning_regex=warning, gridmapvar_name="grid_2" + ) self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): @@ -445,8 +462,7 @@ def test_mapping_rotated(self): # # All non-latlon coordinate systems ... # These all have projection-x/y coordinates with units of metres. 
-    # They all work the same way, except that Mercator/Stereographic have
-    # parameter checking routines that can fail.
+    # They all work the same way.
    # NOTE: various mapping types *require* certain additional properties
    #  - without which an error will occur during translation.
    #  - run_testcase/_make_testcase_cdl know how to provide these
@@ -490,55 +506,13 @@ def test_mapping_mercator(self):
        )
        self.check_result(result, cube_cstype=ics.Mercator)

-    def test_mapping_mercator__fail_unsupported(self):
-        # Provide a mercator grid-mapping with a non-unity scale factor, which
-        # we cannot handle.
-        # Result : fails to convert into a coord-system, and emits a warning.
-        #
-        # Rules Triggered:
-        #     001 : fc_default
-        #     002 : fc_provides_grid_mapping_(mercator) --(FAILED check has_supported_mercator_parameters)
-        #     003 : fc_provides_coordinate_(projection_y)
-        #     004 : fc_provides_coordinate_(projection_x)
-        #     005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs)
-        #     006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs)
-        # Notes:
-        #     * grid-mapping identified : NONE
-        #     * dim-coords identified : proj-x and -y
-        #     * coords built : NONE (no dim or aux coords: cube has no coords)
-        warning = "not yet supported for Mercator"
-        result = self.run_testcase(
-            warning=warning,
-            mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR,
-            mapping_scalefactor=2.0,
-        )
-        self.check_result(result, cube_no_cs=True, cube_no_xycoords=True)
-
     def test_mapping_stereographic(self):
         result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_STEREO)
         self.check_result(result, cube_cstype=ics.Stereographic)

-    def test_mapping_stereographic__fail_unsupported(self):
-        # As for 'test_mapping_mercator__fail_unsupported', provide a non-unity
-        # scale factor, which we cannot handle.
-        # Result : fails to convert into a coord-system, and emits a warning.
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(stereographic) --(FAILED check has_supported_stereographic_parameters) - # 003 : fc_provides_coordinate_(projection_y) - # 004 : fc_provides_coordinate_(projection_x) - # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) - # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # Notes: - # as for 'mercator__fail_unsupported', above - warning = "not yet supported for stereographic" - result = self.run_testcase( - warning=warning, - mapping_type_name=hh.CF_GRID_MAPPING_STEREO, - mapping_scalefactor=2.0, - ) - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + def test_mapping_polar_stereographic(self): + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_POLAR) + self.check_result(result, cube_cstype=ics.PolarStereographic) def test_mapping_transverse_mercator(self): result = self.run_testcase( @@ -661,7 +635,7 @@ def test_mapping__mismatch__latlon_coords_missing_system(self): # * coords built : lat + lon, with no coord-system (see above) warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase( - warning=warning, + warning_regex=warning, gridmapvar_name="moved", xco_name="longitude", xco_units="degrees_east", @@ -718,7 +692,7 @@ def test_mapping__mismatch__rotated_coords_missing_system(self): # * coords built : rotated lat + lon, with no coord-system (see above) warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase( - warning=warning, + warning_regex=warning, gridmapvar_name="moved", xco_name="grid_longitude", xco_units="degrees", @@ -780,7 +754,7 @@ def test_mapping__mismatch__nonll_coords_missing_system(self): # * effectively, just like previous 2 cases warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase( - warning=warning, + warning_regex=warning, gridmapvar_name="moved", xco_name="projection_x", xco_units="m", @@ -900,7 +874,9 @@ def test_nondim_lats(self): # * in terms of rule triggering, this is not distinct from the # "normal" case : but latitude is now created as an aux-coord. warning = "must be.* monotonic" - result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + result = self.run_testcase( + warning_regex=warning, yco_values=[0.0, 0.0] + ) self.check_result(result, yco_is_aux=True) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index 3413090a3d..d962fc2758 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -207,7 +207,7 @@ def test_unrecognised_verticaltype(self): result = self.run_testcase( formula_root_name="unknown", term_names=["a", "b"], - warning="Ignored formula of unrecognised type: 'unknown'.", + warning_regex="Ignored formula of unrecognised type: 'unknown'.", ) # Check that it picks up the terms, but *not* the factory root coord, # which is simply discarded. @@ -226,7 +226,7 @@ def test_two_formulae(self): extra_type = "ocean_sigma_coordinate" result = self.run_testcase( - extra_formula_type=extra_type, warning=warning + extra_formula_type=extra_type, warning_regex=warning ) # NOTE: FOR NOW, check expected behaviour : only one factory will be # built, but there are coordinates (terms) for both types. 
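        # (That is: the extra formula is ignored for factory construction,
        # with the warning checked above, but its term variables are still
        # loaded as ordinary coordinates.)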
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 47760aadcb..59ffa30684 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -313,7 +313,7 @@ def test_dim_nonmonotonic(self): # 002 : fc_provides_coordinate_(time[[_period]]) # 003 : fc_build_coordinate_(time[[_period]]) msg = "Failed to create.* dimension coordinate" - result = self.run_testcase(values_all_zero=True, warning=msg) + result = self.run_testcase(values_all_zero=True, warning_regex=msg) self.check_result(result, "aux") def test_dim_fails_typeident(self): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index 2be5477cb7..ab61d3b1b2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -29,6 +29,7 @@ def test_valid(self): longitude_of_projection_origin=-90, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, + standard_parallel=10, ) cs = build_mercator_coordinate_system(None, cf_grid_var) @@ -40,6 +41,7 @@ def test_valid(self): ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), + standard_parallel=(cf_grid_var.standard_parallel), ) self.assertEqual(cs, expected) @@ -49,6 +51,7 @@ def test_inverse_flattening(self): longitude_of_projection_origin=-90, semi_major_axis=6377563.396, inverse_flattening=299.3249646, + standard_parallel=10, ) cs = build_mercator_coordinate_system(None, cf_grid_var) @@ -61,6 +64,7 @@ def test_inverse_flattening(self): cf_grid_var.semi_major_axis, inverse_flattening=cf_grid_var.inverse_flattening, ), + standard_parallel=(cf_grid_var.standard_parallel), ) self.assertEqual(cs, expected) @@ -69,6 +73,7 @@ def test_longitude_missing(self): spec=[], semi_major_axis=6377563.396, inverse_flattening=299.3249646, + standard_parallel=10, ) cs = build_mercator_coordinate_system(None, cf_grid_var) @@ -77,7 +82,52 @@ def test_longitude_missing(self): ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, inverse_flattening=cf_grid_var.inverse_flattening, - ) + ), + standard_parallel=(cf_grid_var.standard_parallel), + ) + self.assertEqual(cs, expected) + + def test_standard_parallel_missing(self): + cf_grid_var = mock.Mock( + spec=[], + longitude_of_projection_origin=-90, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + + cs = build_mercator_coordinate_system(None, cf_grid_var) + + expected = Mercator( + longitude_of_projection_origin=( + cf_grid_var.longitude_of_projection_origin + ), + ellipsoid=iris.coord_systems.GeogCS( + cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis + ), + ) + self.assertEqual(cs, expected) + + def test_scale_factor_at_projection_origin(self): + cf_grid_var = mock.Mock( + spec=[], + longitude_of_projection_origin=-90, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + scale_factor_at_projection_origin=1.3, + ) + + cs = build_mercator_coordinate_system(None, cf_grid_var) + + expected = Mercator( + longitude_of_projection_origin=( + cf_grid_var.longitude_of_projection_origin + ), + ellipsoid=iris.coord_systems.GeogCS( + cf_grid_var.semi_major_axis, 
cf_grid_var.semi_minor_axis + ), + scale_factor_at_projection_origin=( + cf_grid_var.scale_factor_at_projection_origin + ), ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py new file mode 100755 index 0000000000..09cfde9d5b --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py @@ -0,0 +1,150 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_polar_stereographic_coordinate_system`. + +""" + +# import iris tests first so that some things can be initialised before +# importing anything else +import iris.tests as tests # isort:skip + +from unittest import mock + +import iris +from iris.coord_systems import PolarStereographic +from iris.fileformats._nc_load_rules.helpers import ( + build_polar_stereographic_coordinate_system, +) + + +class TestBuildPolarStereographicCoordinateSystem(tests.IrisTest): + def test_valid_north(self): + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=90, + scale_factor_at_projection_origin=1, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + + cs = build_polar_stereographic_coordinate_system(None, cf_grid_var) + + expected = PolarStereographic( + central_lon=(cf_grid_var.straight_vertical_longitude_from_pole), + central_lat=(cf_grid_var.latitude_of_projection_origin), + scale_factor_at_projection_origin=( + cf_grid_var.scale_factor_at_projection_origin + ), + ellipsoid=iris.coord_systems.GeogCS( + cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis + ), + ) + self.assertEqual(cs, expected) + + def test_valid_south(self): + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=-90, + scale_factor_at_projection_origin=1, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + + cs = build_polar_stereographic_coordinate_system(None, cf_grid_var) + + expected = PolarStereographic( + central_lon=(cf_grid_var.straight_vertical_longitude_from_pole), + central_lat=(cf_grid_var.latitude_of_projection_origin), + scale_factor_at_projection_origin=( + cf_grid_var.scale_factor_at_projection_origin + ), + ellipsoid=iris.coord_systems.GeogCS( + cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis + ), + ) + self.assertEqual(cs, expected) + + def test_valid_with_standard_parallel(self): + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=90, + standard_parallel=30, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + + cs = build_polar_stereographic_coordinate_system(None, cf_grid_var) + + expected = PolarStereographic( + central_lon=(cf_grid_var.straight_vertical_longitude_from_pole), + central_lat=(cf_grid_var.latitude_of_projection_origin), + true_scale_lat=(cf_grid_var.standard_parallel), + ellipsoid=iris.coord_systems.GeogCS( + cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis + ), + ) + self.assertEqual(cs, expected) + + def test_valid_with_false_easting_northing(self): + cf_grid_var = mock.Mock( + spec=[], + 
            straight_vertical_longitude_from_pole=0,
+            latitude_of_projection_origin=90,
+            scale_factor_at_projection_origin=1,
+            false_easting=30,
+            false_northing=40,
+            semi_major_axis=6377563.396,
+            semi_minor_axis=6356256.909,
+        )
+
+        cs = build_polar_stereographic_coordinate_system(None, cf_grid_var)
+
+        expected = PolarStereographic(
+            central_lon=(cf_grid_var.straight_vertical_longitude_from_pole),
+            central_lat=(cf_grid_var.latitude_of_projection_origin),
+            scale_factor_at_projection_origin=(
+                cf_grid_var.scale_factor_at_projection_origin
+            ),
+            false_easting=(cf_grid_var.false_easting),
+            false_northing=(cf_grid_var.false_northing),
+            ellipsoid=iris.coord_systems.GeogCS(
+                cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis
+            ),
+        )
+        self.assertEqual(cs, expected)
+
+    def test_valid_nonzero_vertical_lon(self):
+        cf_grid_var = mock.Mock(
+            spec=[],
+            straight_vertical_longitude_from_pole=30,
+            latitude_of_projection_origin=90,
+            scale_factor_at_projection_origin=1,
+            semi_major_axis=6377563.396,
+            semi_minor_axis=6356256.909,
+        )
+
+        cs = build_polar_stereographic_coordinate_system(None, cf_grid_var)
+
+        expected = PolarStereographic(
+            central_lon=(cf_grid_var.straight_vertical_longitude_from_pole),
+            central_lat=(cf_grid_var.latitude_of_projection_origin),
+            scale_factor_at_projection_origin=(
+                cf_grid_var.scale_factor_at_projection_origin
+            ),
+            ellipsoid=iris.coord_systems.GeogCS(
+                cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis
+            ),
+        )
+        self.assertEqual(cs, expected)
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py
index 5058e4d7d3..3796aeebab 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py
@@ -26,12 +26,13 @@ class TestBuildStereographicCoordinateSystem(tests.IrisTest):
     def _test(self, inverse_flattening=False, no_offsets=False):
         test_easting = -100
         test_northing = 200
+        test_scale_factor = 1.2
         gridvar_props = dict(
             latitude_of_projection_origin=0,
             longitude_of_projection_origin=0,
             false_easting=test_easting,
             false_northing=test_northing,
-            scale_factor_at_projection_origin=1,
+            scale_factor_at_projection_origin=test_scale_factor,
             semi_major_axis=6377563.396,
         )

@@ -61,6 +62,7 @@ def _test(self, inverse_flattening=False, no_offsets=False):
             central_lon=cf_grid_var.longitude_of_projection_origin,
             false_easting=test_easting,
             false_northing=test_northing,
+            scale_factor_at_projection_origin=test_scale_factor,
             ellipsoid=expected_ellipsoid,
         )

diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py
index dfe2895f29..bb94adc72e 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py
@@ -28,7 +28,7 @@ def _engine(cf_grid_var, cf_name):

 class TestHasSupportedMercatorParameters(tests.IrisTest):
-    def test_valid(self):
+    def test_valid_base(self):
         cf_name = "mercator"
         cf_grid_var = mock.Mock(
             spec=[],
             longitude_of_projection_origin=-90,
             false_easting=0,
             false_northing=0,
             scale_factor_at_projection_origin=1,
             semi_major_axis=6377563.396,
             semi_minor_axis=6356256.909,
         )
         engine = _engine(cf_grid_var, cf_name)

@@ -45,85 +45,68 @@

         self.assertTrue(is_valid)

-    def
test_invalid_scale_factor(self): - # Iris does not yet support scale factors other than one for - # Mercator projections + def test_valid_false_easting_northing(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], - longitude_of_projection_origin=0, - false_easting=0, - false_northing=0, - scale_factor_at_projection_origin=0.9, + longitude_of_projection_origin=-90, + false_easting=15, + false_northing=10, + scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Scale factor") + self.assertTrue(is_valid) - def test_invalid_standard_parallel(self): - # Iris does not yet support standard parallels other than zero for - # Mercator projections + def test_valid_standard_parallel(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], - longitude_of_projection_origin=0, + longitude_of_projection_origin=-90, false_easting=0, false_northing=0, - standard_parallel=30, + standard_parallel=15, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Standard parallel") + self.assertTrue(is_valid) - def test_invalid_false_easting(self): - # Iris does not yet support false eastings other than zero for - # Mercator projections + def test_valid_scale_factor(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, - false_easting=100, + false_easting=0, false_northing=0, - scale_factor_at_projection_origin=1, + scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False easting") + self.assertTrue(is_valid) - def test_invalid_false_northing(self): - # Iris does not yet support false northings other than zero for + def test_invalid_scale_factor_and_standard_parallel(self): + # Scale factor and standard parallel cannot both be specified for # Mercator projections cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, - false_northing=100, - scale_factor_at_projection_origin=1, + false_northing=0, + scale_factor_at_projection_origin=0.9, + standard_parallel=20, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) @@ -135,7 +118,11 @@ def test_invalid_false_northing(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False northing") + self.assertRegex( + str(warns[0]), + "both " + '"scale_factor_at_projection_origin" and "standard_parallel"', + ) if __name__ == "__main__": diff --git 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py new file mode 100755 index 0000000000..6e6d6e4e81 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -0,0 +1,242 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +has_supported_polar_stereographic_parameters`. + +""" + +from unittest import mock +import warnings + +from iris.fileformats._nc_load_rules.helpers import ( + has_supported_polar_stereographic_parameters, +) + +# import iris tests first so that some things can be initialised before +# importing anything else +import iris.tests as tests # isort:skip + + +def _engine(cf_grid_var, cf_name): + cf_group = {cf_name: cf_grid_var} + cf_var = mock.Mock(cf_group=cf_group) + return mock.Mock(cf_var=cf_var) + + +class TestHasSupportedPolarStereographicParameters(tests.IrisTest): + def test_valid_base_north(self): + cf_name = "polar_stereographic" + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=90, + false_easting=0, + false_northing=0, + scale_factor_at_projection_origin=1, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + engine = _engine(cf_grid_var, cf_name) + + is_valid = has_supported_polar_stereographic_parameters( + engine, cf_name + ) + + self.assertTrue(is_valid) + + def test_valid_base_south(self): + cf_name = "polar_stereographic" + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=-90, + false_easting=0, + false_northing=0, + scale_factor_at_projection_origin=1, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + engine = _engine(cf_grid_var, cf_name) + + is_valid = has_supported_polar_stereographic_parameters( + engine, cf_name + ) + + self.assertTrue(is_valid) + + def test_valid_straight_vertical_longitude(self): + cf_name = "polar_stereographic" + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=30, + latitude_of_projection_origin=90, + false_easting=0, + false_northing=0, + scale_factor_at_projection_origin=1, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + engine = _engine(cf_grid_var, cf_name) + + is_valid = has_supported_polar_stereographic_parameters( + engine, cf_name + ) + + self.assertTrue(is_valid) + + def test_valid_false_easting_northing(self): + cf_name = "polar_stereographic" + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=90, + false_easting=15, + false_northing=10, + scale_factor_at_projection_origin=1, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) + engine = _engine(cf_grid_var, cf_name) + + is_valid = has_supported_polar_stereographic_parameters( + engine, cf_name + ) + + self.assertTrue(is_valid) + + def test_valid_standard_parallel(self): + cf_name = "polar_stereographic" + cf_grid_var = mock.Mock( + spec=[], + straight_vertical_longitude_from_pole=0, + latitude_of_projection_origin=90, + false_easting=0, + false_northing=0, + standard_parallel=15, + semi_major_axis=6377563.396, + semi_minor_axis=6356256.909, + ) 
+        engine = _engine(cf_grid_var, cf_name)
+
+        is_valid = has_supported_polar_stereographic_parameters(
+            engine, cf_name
+        )
+
+        self.assertTrue(is_valid)
+
+    def test_valid_scale_factor(self):
+        cf_name = "polar_stereographic"
+        cf_grid_var = mock.Mock(
+            spec=[],
+            straight_vertical_longitude_from_pole=0,
+            latitude_of_projection_origin=90,
+            false_easting=0,
+            false_northing=0,
+            scale_factor_at_projection_origin=0.9,
+            semi_major_axis=6377563.396,
+            semi_minor_axis=6356256.909,
+        )
+        engine = _engine(cf_grid_var, cf_name)
+
+        is_valid = has_supported_polar_stereographic_parameters(
+            engine, cf_name
+        )
+
+        self.assertTrue(is_valid)
+
+    def test_invalid_scale_factor_and_standard_parallel(self):
+        # Scale factor and standard parallel cannot both be specified for
+        # Polar Stereographic projections
+        cf_name = "polar_stereographic"
+        cf_grid_var = mock.Mock(
+            spec=[],
+            straight_vertical_longitude_from_pole=0,
+            latitude_of_projection_origin=90,
+            false_easting=0,
+            false_northing=0,
+            scale_factor_at_projection_origin=0.9,
+            standard_parallel=20,
+            semi_major_axis=6377563.396,
+            semi_minor_axis=6356256.909,
+        )
+        engine = _engine(cf_grid_var, cf_name)
+
+        with warnings.catch_warnings(record=True) as warns:
+            warnings.simplefilter("always")
+            is_valid = has_supported_polar_stereographic_parameters(
+                engine, cf_name
+            )
+
+        self.assertFalse(is_valid)
+        self.assertEqual(len(warns), 1)
+        self.assertRegex(
+            str(warns[0]),
+            "both "
+            '"scale_factor_at_projection_origin" and "standard_parallel"',
+        )
+
+    def test_absent_scale_factor_and_standard_parallel(self):
+        # One of scale factor and standard parallel must be specified for
+        # Polar Stereographic projections
+        cf_name = "polar_stereographic"
+        cf_grid_var = mock.Mock(
+            spec=[],
+            straight_vertical_longitude_from_pole=0,
+            latitude_of_projection_origin=90,
+            false_easting=0,
+            false_northing=0,
+            semi_major_axis=6377563.396,
+            semi_minor_axis=6356256.909,
+        )
+        engine = _engine(cf_grid_var, cf_name)
+
+        with warnings.catch_warnings(record=True) as warns:
+            warnings.simplefilter("always")
+            is_valid = has_supported_polar_stereographic_parameters(
+                engine, cf_name
+            )
+
+        self.assertFalse(is_valid)
+        self.assertEqual(len(warns), 1)
+        self.assertRegex(
+            str(warns[0]),
+            'One of "scale_factor_at_projection_origin" and '
+            '"standard_parallel" is required.',
+        )
+
+    def test_invalid_latitude_of_projection_origin(self):
+        # The latitude of projection origin must be +90 or -90 for
+        # Polar Stereographic projections
+        cf_name = "polar_stereographic"
+        cf_grid_var = mock.Mock(
+            spec=[],
+            straight_vertical_longitude_from_pole=0,
+            latitude_of_projection_origin=45,
+            false_easting=0,
+            false_northing=0,
+            scale_factor_at_projection_origin=1,
+            semi_major_axis=6377563.396,
+            semi_minor_axis=6356256.909,
+        )
+        engine = _engine(cf_grid_var, cf_name)
+
+        with warnings.catch_warnings(record=True) as warns:
+            warnings.simplefilter("always")
+            is_valid = has_supported_polar_stereographic_parameters(
+                engine, cf_name
+            )
+
+        self.assertFalse(is_valid)
+        self.assertEqual(len(warns), 1)
+        self.assertRegex(
+            str(warns[0]),
+            r'"latitude_of_projection_origin" must be \+90 or -90\.',
+        )
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py
deleted file mode 100644
index 8bec823f4b..0000000000
---
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -has_supported_stereographic_parameters`. - -""" - -from unittest import mock -import warnings - -from iris.fileformats._nc_load_rules.helpers import ( - has_supported_stereographic_parameters, -) - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - - -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return mock.Mock(cf_var=cf_var) - - -class TestHasSupportedStereographicParameters(tests.IrisTest): - def test_valid(self): - cf_name = "stereographic" - cf_grid_var = mock.Mock( - spec=[], - latitude_of_projection_origin=0, - longitude_of_projection_origin=0, - false_easting=-100, - false_northing=200, - scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - is_valid = has_supported_stereographic_parameters(engine, cf_name) - - self.assertTrue(is_valid) - - def test_invalid_scale_factor(self): - # Iris does not yet support scale factors other than one for - # stereographic projections - cf_name = "stereographic" - cf_grid_var = mock.Mock( - spec=[], - latitude_of_projection_origin=0, - longitude_of_projection_origin=0, - false_easting=-100, - false_northing=200, - scale_factor_at_projection_origin=0.9, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_stereographic_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Scale factor") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py index ee814ea168..e17082b5e9 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py @@ -30,7 +30,7 @@ TransverseMercator, VerticalPerspective, ) -from iris.coords import DimCoord +from iris.coords import AuxCoord, DimCoord from iris.cube import Cube from iris.fileformats.netcdf import Saver import iris.tests.stock as stock @@ -87,11 +87,17 @@ def _mercator_cube(self, ellipsoid=None): cube.add_dim_coord(coord, 1) return cube - def _stereo_cube(self, ellipsoid=None): + def _stereo_cube(self, ellipsoid=None, scale_factor=None): data = self.array_lib.arange(12).reshape(3, 4) cube = Cube(data, "air_pressure_anomaly") stereo = Stereographic( - -10.0, 20.0, 500000.0, -200000.0, None, ellipsoid + -10.0, + 20.0, + 500000.0, + -200000.0, + None, + ellipsoid, + scale_factor_at_projection_origin=scale_factor, ) coord = DimCoord( np.arange(3), @@ -160,6 +166,14 @@ def test_stereographic_no_ellipsoid(self): saver.write(cube) self.assertCDL(nc_path) + def test_stereographic_scale_factor(self): + # Create a Cube with a stereographic coordinate system. 
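+        # (Passing a non-unity scale factor, 1.3 here, exercises the
+        # scale_factor_at_projection_origin argument newly accepted by
+        # _stereo_cube above.)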
+ cube = self._stereo_cube(scale_factor=1.3) + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + self.assertCDL(nc_path) + def _simple_cube(self, dtype): data = self.array_lib.arange(12, dtype=dtype).reshape(3, 4) points = np.arange(3, dtype=dtype) @@ -299,6 +313,22 @@ def test_with_climatology(self): saver.write(cube) self.assertCDL(nc_path) + def test_dimensional_to_scalar(self): + # Bounds for 1 point are still in a 2D array. + scalar_bounds = self.array_lib.arange(2).reshape(1, 2) + scalar_point = scalar_bounds.mean() + scalar_data = self.array_lib.zeros(1) + scalar_coord = AuxCoord(points=scalar_point, bounds=scalar_bounds) + cube = Cube(scalar_data, aux_coords_and_dims=[(scalar_coord, 0)])[0] + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = nc.Dataset(nc_path) + # Confirm that the only dimension is the one denoting the number + # of bounds - have successfully saved the 2D bounds array into 1D. + self.assertEqual(["bnds"], list(ds.dimensions.keys())) + ds.close() + class Test__create_cf_bounds(tests.IrisTest): # Method is substituted in test_Saver__lazy. @@ -519,8 +549,9 @@ def test_contains_fill_value_passed(self): # Test that a warning is raised if the data contains the fill value. cube = self._make_cube(">f4") fill_value = 1 - with self.assertWarnsRegexp( - "contains unmasked data points equal to the fill-value" + with self.assertWarnsRegex( + UserWarning, + "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube, fill_value=fill_value): pass @@ -530,8 +561,9 @@ def test_contains_fill_value_byte(self): # when it is of a byte type. cube = self._make_cube(">i1") fill_value = 1 - with self.assertWarnsRegexp( - "contains unmasked data points equal to the fill-value" + with self.assertWarnsRegex( + UserWarning, + "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube, fill_value=fill_value): pass @@ -541,8 +573,9 @@ def test_contains_default_fill_value(self): # value if no fill_value argument is supplied. 
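        # (nc.default_fillvals maps netCDF dtype codes such as "f4" to the
        # library's default fill values; the assignment below plants an
        # unmasked point equal to that default.)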
cube = self._make_cube(">f4") cube.data[0, 0] = nc.default_fillvals["f4"] - with self.assertWarnsRegexp( - "contains unmasked data points equal to the fill-value" + with self.assertWarnsRegex( + UserWarning, + "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube): pass @@ -753,7 +786,7 @@ def check_call(self, coord_name, coord_system, units, expected_units): self.assertEqual(result, expected_units) def test_geogcs_latitude(self): - crs = iris.coord_systems.GeogCS(60, 0) + crs = iris.coord_systems.GeogCS(60, 30) self.check_call( "latitude", coord_system=crs, @@ -762,7 +795,7 @@ def test_geogcs_latitude(self): ) def test_geogcs_longitude(self): - crs = iris.coord_systems.GeogCS(60, 0) + crs = iris.coord_systems.GeogCS(60, 30) self.check_call( "longitude", coord_system=crs, diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py index a914dd3314..575c852ece 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py @@ -575,6 +575,7 @@ def test_nonmesh_dim(self): self.assertEqual(data_props["mesh"], mesh_name) self.assertEqual(data_props["location"], "face") + @tests.skip_data def test_nonmesh_hybrid_dim(self): # Check a case with a hybrid non-mesh dimension cube = realistic_4d() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py index bbcf2cc72b..39992d03a0 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py @@ -11,9 +11,12 @@ """ +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + from pathlib import Path from shutil import rmtree -from subprocess import check_call import tempfile from cf_units import as_unit @@ -23,10 +26,7 @@ from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD from iris.experimental.ugrid.mesh import MeshCoord from iris.fileformats.netcdf import load_cubes, logger - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests +from iris.tests.stock.netcdf import ncgen_from_cdl def setUpModule(): @@ -42,12 +42,7 @@ def tearDownModule(): def cdl_to_nc(cdl): cdl_path = TMP_DIR / "tst.cdl" nc_path = TMP_DIR / "tst.nc" - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. 
- command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + ncgen_from_cdl(cdl, cdl_path, nc_path) return str(nc_path) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py index 9c4fbf622b..bbde2d0a2d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py @@ -41,6 +41,20 @@ def test_with_interval(self): res = parse_cell_methods(cell_method_str) self.assertEqual(res, expected) + def test_multiple_axes(self): + cell_method_strings = [ + "lat: lon: standard_deviation", + "lat: lon : standard_deviation", + "lat : lon: standard_deviation", + "lat : lon : standard_deviation", + ] + expected = ( + CellMethod(method="standard_deviation", coords=["lat", "lon"]), + ) + for cell_method_str in cell_method_strings: + res = parse_cell_methods(cell_method_str) + self.assertEqual(res, expected) + def test_multiple(self): cell_method_strings = [ "time: maximum (interval: 1 hr) time: mean (interval: 1 day)", @@ -85,6 +99,51 @@ def test_comment(self): res = parse_cell_methods(cell_method_str) self.assertEqual(res, expected) + def test_comment_brackets(self): + cell_method_strings = [ + "time: minimum within days (comment: 18h(day-1)-18h)", + "time : minimum within days (comment: 18h(day-1)-18h)", + ] + expected = ( + CellMethod( + method="minimum within days", + coords="time", + intervals=None, + comments="18h(day-1)-18h", + ), + ) + for cell_method_str in cell_method_strings: + res = parse_cell_methods(cell_method_str) + self.assertEqual(res, expected) + + def test_comment_bracket_mismatch_warning(self): + cell_method_strings = [ + "time: minimum within days (comment: 18h day-1)-18h)", + "time : minimum within days (comment: 18h day-1)-18h)", + ] + for cell_method_str in cell_method_strings: + with self.assertWarns( + UserWarning, + msg="Cell methods may be incorrectly parsed due to mismatched brackets", + ): + _ = parse_cell_methods(cell_method_str) + + def test_badly_formatted_warning(self): + cell_method_strings = [ + # "time: maximum (interval: 1 hr comment: first bit " + # "time: mean (interval: 1 day comment: second bit)", + "time: (interval: 1 hr comment: first bit) " + "time: mean (interval: 1 day comment: second bit)", + "time: maximum (interval: 1 hr comment: first bit) " + "time: (interval: 1 day comment: second bit)", + ] + for cell_method_str in cell_method_strings: + with self.assertWarns( + UserWarning, + msg=f"Failed to fully parse cell method string: {cell_method_str}", + ): + _ = parse_cell_methods(cell_method_str) + def test_portions_of_cells(self): cell_method_strings = [ "area: mean where sea_ice over sea", diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/test_save.py index 830d8c5e52..669a3c4137 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_save.py @@ -9,16 +9,21 @@ # importing anything else. 
import iris.tests as tests # isort:skip +from pathlib import Path +from shutil import rmtree +from tempfile import mkdtemp from unittest import mock import netCDF4 as nc import numpy as np import iris -from iris.coords import DimCoord +from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, save from iris.tests.stock import lat_lon_cube +from iris.tests.stock.mesh import sample_mesh_cube class Test_conventions(tests.IrisTest): @@ -211,5 +216,145 @@ def test_multi_wrong_length(self): save(cubes, "dummy.nc", fill_value=fill_values) +class Test_HdfSaveBug(tests.IrisTest): + """ + Check for a known problem with netcdf4. + + If you create a dimension with the same name as an existing variable, there + is a specific problem, relating to HDF and so limited to netCDF-4 formats. + See: https://github.com/Unidata/netcdf-c/issues/1772 + + In all these test cases, a straightforward translation to the file would be + able to save [cube_2, cube_1], but *not* [cube_1, cube_2], + because the latter creates a dim of the same name as the 'cube_1' data + variable. + + Here, we are testing the specific workarounds in the Iris netCDF save code + which avoid that problem. + Unfortunately, owing to the complexity of the iris.fileformats.netcdf.Saver + code, there are several separate places where this had to be fixed. + + N.B. we also check that the data (mostly) survives a save-load roundtrip. + To identify the read-back cubes with the originals, we use var-names, + which works because the save code opts to adjust dimension names _instead_. + + """ + + def _check_save_and_reload(self, cubes): + tempdir = Path(mkdtemp()) + filepath = tempdir / "tmp.nc" + try: + # Save the given cubes. + save(cubes, filepath) + + # Load them back for roundtrip testing. + with PARSE_UGRID_ON_LOAD.context(): + new_cubes = iris.load(str(filepath)) + + # There should definitely still be the same number of cubes. + self.assertEqual(len(new_cubes), len(cubes)) + + # Get results in the input order, matching by var_names. + result = [new_cubes.extract_cube(cube.var_name) for cube in cubes] + + # Check that input + output match cube-for-cube. + # NB in this codeblock, before we destroy the temporary file. + for cube_in, cube_out in zip(cubes, result): + # Using special tolerant equivalence-check. + self.assertSameCubes(cube_in, cube_out) + + finally: + rmtree(tempdir) + + # Return result cubes for any additional checks. + return result + + def assertSameCubes(self, cube1, cube2): + """ + A special tolerant cube compare. + + Ignore any 'Conventions' attributes. + Ignore all var-names. + + """ + + def clean_cube(cube): + cube = cube.copy() # don't modify the original + # Remove any 'Conventions' attributes + cube.attributes.pop("Conventions", None) + # Remove var-names (as original mesh components wouldn't have them) + cube.var_name = None + for coord in cube.coords(): + coord.var_name = None + mesh = cube.mesh + if mesh: + mesh.var_name = None + for component in mesh.coords() + mesh.connectivities(): + component.var_name = None + + return cube + + self.assertEqual(clean_cube(cube1), clean_cube(cube2)) + + def test_dimcoord_varname_collision(self): + cube_2 = Cube([0, 1], var_name="cube_2") + x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") + cube_2.add_dim_coord(x_dim, 0) + # First cube has a varname which collides with the dimcoord.
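For reference, the collision these test cases guard against can be demonstrated with the bare netCDF4 library, outside of Iris entirely. A minimal sketch, not part of the changeset itself; the file name and sizes are arbitrary, and depending on the netcdf-c build the damage may only surface as an error on close or as an unreadable file:

    import netCDF4 as nc

    ds = nc.Dataset("collision.nc", "w", format="NETCDF4")
    ds.createDimension("x", 2)
    # An ordinary data variable, deliberately named like a dimension.
    ds.createVariable("dim_y", "i4", ("x",))
    # Creating a dimension with the same name as that existing variable
    # trips the HDF5 dimension-scale clash of netcdf-c issue #1772.
    ds.createDimension("dim_y", 3)
    ds.close()

The workaround under test renames the *dimension* rather than the data variable, which is why the round-trip checks above match cubes by var_name.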
+ cube_1 = Cube([0, 1], long_name="cube_1", var_name="dimco_name") + # Test save + loadback + reload_1, reload_2 = self._check_save_and_reload([cube_1, cube_2]) + # As re-loaded, the coord will have a different varname. + self.assertEqual(reload_2.coord("dim_x").var_name, "dimco_name_0") + + def test_anonymous_dim_varname_collision(self): + # Second cube is going to name an anonymous dim. + cube_2 = Cube([0, 1], var_name="cube_2") + # First cube has a varname which collides with the dim-name. + cube_1 = Cube([0, 1], long_name="cube_1", var_name="dim0") + # Add a dimcoord to prevent the *first* cube having an anonymous dim. + x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") + cube_1.add_dim_coord(x_dim, 0) + # Test save + loadback + self._check_save_and_reload([cube_1, cube_2]) + + def test_bounds_dim_varname_collision(self): + cube_2 = Cube([0, 1], var_name="cube_2") + x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") + x_dim.guess_bounds() + cube_2.add_dim_coord(x_dim, 0) + # First cube has a varname which collides with the bounds dimension. + cube_1 = Cube([0], long_name="cube_1", var_name="bnds") + # Test save + loadback + self._check_save_and_reload([cube_1, cube_2]) + + def test_string_dim_varname_collision(self): + cube_2 = Cube([0, 1], var_name="cube_2") + # NOTE: it *should* be possible for a cube with string data to cause + # this collision, but cubes with string data are currently not working. + # See : https://github.com/SciTools/iris/issues/4412 + x_dim = AuxCoord( + ["this", "that"], long_name="dim_x", var_name="string_auxco" + ) + cube_2.add_aux_coord(x_dim, 0) + cube_1 = Cube([0], long_name="cube_1", var_name="string4") + # Test save + loadback + self._check_save_and_reload([cube_1, cube_2]) + + def test_mesh_location_dim_varname_collision(self): + cube_2 = sample_mesh_cube() + cube_2.var_name = "cube_2" # Make it identifiable + cube_1 = Cube([0], long_name="cube_1", var_name="Mesh2d_node") + # Test save + loadback + self._check_save_and_reload([cube_1, cube_2]) + + def test_connectivity_dim_varname_collision(self): + cube_2 = sample_mesh_cube() + cube_2.var_name = "cube_2" # Make it identifiable + cube_1 = Cube([0], long_name="cube_1", var_name="Mesh_2d_face_N_nodes") + # Test save + loadback + self._check_save_and_reload([cube_1, cube_2]) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index 5ce41402ad..5e2bbcaa2c 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -43,7 +43,7 @@ ] -class TestPPField(PPField): +class DummyPPField(PPField): HEADER_DEFN = DUMMY_HEADER HEADER_DICT = dict(DUMMY_HEADER) @@ -81,7 +81,7 @@ def test_float64(self): # Tests down-casting of >f8 data to >f4. def field_checksum(data): - field = TestPPField()._ready_for_save() + field = DummyPPField()._ready_for_save() field.data = data with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: @@ -92,39 +92,39 @@ def field_checksum(data): data_64 = np.linspace(0, 1, num=10, endpoint=False).reshape(2, 5) checksum_32 = field_checksum(data_64.astype(">f4")) msg = "Downcasting array precision from float64 to float32 for save." 
- with self.assertWarnsRegexp(msg): + with self.assertWarnsRegex(UserWarning, msg): checksum_64 = field_checksum(data_64.astype(">f8")) self.assertEqual(checksum_32, checksum_64) def test_masked_mdi_value_warning(self): # Check that an unmasked MDI value raises a warning. - field = TestPPField()._ready_for_save() + field = DummyPPField()._ready_for_save() field.bmdi = -123.4 # Make float32 data, as float64 default produces an extra warning. field.data = np.ma.masked_array( [1.0, field.bmdi, 3.0], dtype=np.float32 ) msg = "PPField data contains unmasked points" - with self.assertWarnsRegexp(msg): + with self.assertWarnsRegex(UserWarning, msg): with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: field.save(pp_file) def test_unmasked_mdi_value_warning(self): # Check that MDI in *unmasked* data raises a warning. - field = TestPPField()._ready_for_save() + field = DummyPPField()._ready_for_save() field.bmdi = -123.4 # Make float32 data, as float64 default produces an extra warning. field.data = np.array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegexp(msg): + with self.assertWarnsRegex(UserWarning, msg): with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: field.save(pp_file) def test_mdi_masked_value_nowarning(self): # Check that a *masked* MDI value does not raise a warning. - field = TestPPField()._ready_for_save() + field = DummyPPField()._ready_for_save() field.bmdi = -123.4 # Make float32 data, as float64 default produces an extra warning. field.data = np.ma.masked_array( @@ -141,24 +141,24 @@ def test_mdi_masked_value_nowarning(self): class Test_calendar(tests.IrisTest): def test_greg(self): - field = TestPPField() + field = DummyPPField() field.lbtim = SplittableInt(1, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "gregorian") + self.assertEqual(field.calendar, "standard") def test_360(self): - field = TestPPField() + field = DummyPPField() field.lbtim = SplittableInt(2, {"ia": 2, "ib": 1, "ic": 0}) self.assertEqual(field.calendar, "360_day") def test_365(self): - field = TestPPField() + field = DummyPPField() field.lbtim = SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}) self.assertEqual(field.calendar, "365_day") class Test_coord_system(tests.IrisTest): def _check_cs(self, bplat, bplon, rotated): - field = TestPPField() + field = DummyPPField() field.bplat = bplat field.bplon = bplon with mock.patch( @@ -203,29 +203,29 @@ def setUp(self): self.header = list(header_longs) + list(header_floats) def test_no_headers(self): - field = TestPPField() + field = DummyPPField() self.assertIsNone(field._raw_header) self.assertIsNone(field.raw_lbtim) self.assertIsNone(field.raw_lbpack) def test_lbtim_lookup(self): - self.assertEqual(TestPPField.HEADER_DICT["lbtim"], (12,)) + self.assertEqual(DummyPPField.HEADER_DICT["lbtim"], (12,)) def test_lbpack_lookup(self): - self.assertEqual(TestPPField.HEADER_DICT["lbpack"], (20,)) + self.assertEqual(DummyPPField.HEADER_DICT["lbpack"], (20,)) def test_raw_lbtim(self): raw_lbtim = 4321 - (loc,) = TestPPField.HEADER_DICT["lbtim"] + (loc,) = DummyPPField.HEADER_DICT["lbtim"] self.header[loc] = raw_lbtim - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.raw_lbtim, raw_lbtim) def test_raw_lbpack(self): raw_lbpack = 4321 - (loc,) = TestPPField.HEADER_DICT["lbpack"] + (loc,) = DummyPPField.HEADER_DICT["lbpack"] self.header[loc] = raw_lbpack - 
field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.raw_lbpack, raw_lbpack) @@ -237,39 +237,39 @@ def setUp(self): def test_attr_singular_long(self): lbrow = 1234 - (loc,) = TestPPField.HEADER_DICT["lbrow"] + (loc,) = DummyPPField.HEADER_DICT["lbrow"] self.header[loc] = lbrow - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.lbrow, lbrow) def test_attr_multi_long(self): lbuser = (100, 101, 102, 103, 104, 105, 106) - loc = TestPPField.HEADER_DICT["lbuser"] + loc = DummyPPField.HEADER_DICT["lbuser"] self.header[loc[0] : loc[-1] + 1] = lbuser - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.lbuser, lbuser) def test_attr_singular_float(self): bdatum = 1234 - (loc,) = TestPPField.HEADER_DICT["bdatum"] + (loc,) = DummyPPField.HEADER_DICT["bdatum"] self.header[loc] = bdatum - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.bdatum, bdatum) def test_attr_multi_float(self): brsvd = (100, 101, 102, 103) - loc = TestPPField.HEADER_DICT["brsvd"] + loc = DummyPPField.HEADER_DICT["brsvd"] start = loc[0] stop = loc[-1] + 1 self.header[start:stop] = brsvd - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.brsvd, brsvd) def test_attr_lbtim(self): raw_lbtim = 4321 - (loc,) = TestPPField.HEADER_DICT["lbtim"] + (loc,) = DummyPPField.HEADER_DICT["lbtim"] self.header[loc] = raw_lbtim - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) result = field.lbtim self.assertEqual(result, raw_lbtim) self.assertIsInstance(result, SplittableInt) @@ -279,9 +279,9 @@ def test_attr_lbtim(self): def test_attr_lbpack(self): raw_lbpack = 4321 - (loc,) = TestPPField.HEADER_DICT["lbpack"] + (loc,) = DummyPPField.HEADER_DICT["lbpack"] self.header[loc] = raw_lbpack - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) result = field.lbpack self.assertEqual(result, raw_lbpack) self.assertIsInstance(result, SplittableInt) @@ -290,7 +290,7 @@ def test_attr_lbpack(self): self.assertIsInstance(result, SplittableInt) def test_attr_raw_lbtim_assign(self): - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.raw_lbpack, 0) self.assertEqual(field.lbtim, 0) raw_lbtim = 4321 @@ -299,7 +299,7 @@ def test_attr_raw_lbtim_assign(self): self.assertNotIsInstance(field.raw_lbtim, SplittableInt) def test_attr_raw_lbpack_assign(self): - field = TestPPField(header=self.header) + field = DummyPPField(header=self.header) self.assertEqual(field.raw_lbpack, 0) self.assertEqual(field.lbpack, 0) raw_lbpack = 4321 @@ -309,14 +309,14 @@ def test_attr_raw_lbpack_assign(self): def test_attr_unknown(self): with self.assertRaises(AttributeError): - TestPPField().x + DummyPPField().x class Test_lbtim(tests.IrisTest): def test_get_splittable(self): headers = [0] * 64 headers[12] = 12345 - field = TestPPField(headers) + field = DummyPPField(headers) self.assertIsInstance(field.lbtim, SplittableInt) self.assertEqual(field.lbtim.ia, 123) self.assertEqual(field.lbtim.ib, 4) @@ -325,7 +325,7 @@ def test_get_splittable(self): def test_set_int(self): headers = [0] * 64 headers[12] = 12345 - field = TestPPField(headers) + field = DummyPPField(headers) field.lbtim = 34567 self.assertIsInstance(field.lbtim, SplittableInt) self.assertEqual(field.lbtim.ia, 345) @@ -339,7 +339,7 @@ 
def test_set_splittable(self): # arbitrary SplittableInt with crazy named attributes. headers = [0] * 64 headers[12] = 12345 - field = TestPPField(headers) + field = DummyPPField(headers) si = SplittableInt(34567, {"foo": 0}) field.lbtim = si self.assertIsInstance(field.lbtim, SplittableInt) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index 0f2a8a2d4b..c9c4821e0a 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -65,7 +65,7 @@ def test_3d(self): def test_multiple_odd_dims(self): # Test to ensure multiple collapsed dimensions don't interfere. # make a 5-D array where dimensions 0, 2 and 3 are degenerate. - array = np.arange(3 ** 5).reshape([3] * 5) + array = np.arange(3**5).reshape([3] * 5) array[1:] = array[0:1] array[:, :, 1:] = array[:, :, 0:1] array[:, :, :, 1:] = array[:, :, :, 0:1] diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index d975884cb0..cf147e5928 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -13,9 +13,7 @@ # importing anything else. import iris.tests as tests # isort:skip -import unittest - -from cf_units import CALENDAR_360_DAY, CALENDAR_GREGORIAN, Unit +from cf_units import CALENDAR_360_DAY, CALENDAR_STANDARD, Unit from cftime import datetime as nc_datetime import numpy as np @@ -40,7 +38,7 @@ def _lbcode(value=None, ix=None, iy=None): return result -_EPOCH_HOURS_UNIT = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN) +_EPOCH_HOURS_UNIT = Unit("hours since epoch", calendar=CALENDAR_STANDARD) _HOURS_UNIT = Unit("hours") @@ -733,7 +731,6 @@ def test_t1_list_t2_scalar(self): class TestArrayInputWithLBTIM_0_3_1(TestField): - @unittest.skip("#3508 investigate unit test failure") def test_t1_scalar_t2_list(self): lbtim = _lbtim(ib=3, ic=1) lbcode = _lbcode(1) @@ -756,9 +753,13 @@ def test_t1_scalar_t2_list(self): ) # Expected coords. 
+ leap_year_adjust = np.array([0, 24, 24]) points = np.ones_like(years) * lbft bounds = np.array( - [lbft - ((years - 1970) * 365 * 24 + 2 * 24), points] + [ + lbft - ((years - 1970) * 365 * 24 + 2 * 24 + leap_year_adjust), + points, + ] ).transpose() fp_coord = AuxCoord( points, @@ -766,7 +767,7 @@ def test_t1_scalar_t2_list(self): units="hours", bounds=bounds, ) - points = (years - 1970) * 365 * 24 + 10 * 24 + 9 + points = (years - 1970) * 365 * 24 + 10 * 24 + 9 + leap_year_adjust bounds = np.array( [np.ones_like(points) * (8 * 24 + 9), points] ).transpose() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py index 2877d6ea89..2c5d672e14 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py @@ -28,9 +28,9 @@ # -class TestEpochHours__gregorian(tests.IrisTest): +class TestEpochHours__standard(tests.IrisTest): def setUp(self): - self.calendar = cf_units.CALENDAR_GREGORIAN + self.calendar = cf_units.CALENDAR_STANDARD self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) def test_1970_1_1(self): diff --git a/lib/iris/tests/unit/io/test_expand_filespecs.py b/lib/iris/tests/unit/io/test_expand_filespecs.py index c28e4f9b2e..8720478153 100644 --- a/lib/iris/tests/unit/io/test_expand_filespecs.py +++ b/lib/iris/tests/unit/io/test_expand_filespecs.py @@ -10,6 +10,7 @@ import iris.tests as tests # isort:skip import os +from pathlib import Path import shutil import tempfile import textwrap @@ -94,7 +95,30 @@ def test_files_and_none(self): .format(self.tmpdir) ) - self.assertStringEqual(str(err.exception), expected) + self.assertMultiLineEqual(str(err.exception), expected) + + def test_false_bool_absolute(self): + tempdir = self.tmpdir + msg = os.path.join(tempdir, "no_exist.txt") + (result,) = iio.expand_filespecs([msg], False) + self.assertEqual(result, msg) + + def test_false_bool_home(self): + # ensure that not only does files_expected not error, + # but that the path is still expanded from a ~ + msg = str(Path().home() / "no_exist.txt") + (result,) = iio.expand_filespecs(["~/no_exist.txt"], False) + self.assertEqual(result, msg) + + def test_false_bool_relative(self): + cwd = os.getcwd() + try: + os.chdir(self.tmpdir) + item_out = iio.expand_filespecs(["no_exist.txt"], False) + item_in = [os.path.join(self.tmpdir, "no_exist.txt")] + self.assertEqual(item_out, item_in) + finally: + os.chdir(cwd) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py index b92e26f2d1..623cf417f2 100755 --- a/lib/iris/tests/unit/io/test_save.py +++ b/lib/iris/tests/unit/io/test_save.py @@ -26,6 +26,12 @@ def test_pathlib_save(self): "iris.io.find_saver", return_value=(lambda *args, **kwargs: None) ) + def replace_expand(file_specs, files_expected=True): + return file_specs + + # does not expand filepaths due to patch + self.patch("iris.io.expand_filespecs", replace_expand) + test_variants = [ ("string", "string"), (Path("string/string"), "string/string"), diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py index e7f3adad76..66c03d04c8 100644 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py @@ -25,6 +25,8 @@ def create_mock_cube(array): cube.has_lazy_data = 
unittest.mock.Mock(return_value=is_lazy_data(array)) cube.lazy_data = unittest.mock.Mock(return_value=array) cube.shape = array.shape + # Remove compute so cube is not interpreted as dask array. + del cube.compute return cube, cube_data @@ -58,6 +60,14 @@ def test_lazy_input(self): cube.lazy_data.assert_called_once() cube_data.assert_not_called() + def test_dask_array_input(self): + lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) + result = map_complete_blocks( + lazy_array, self.func, dims=(1,), out_sizes=(4,) + ) + self.assertTrue(is_lazy_data(result)) + self.assertArrayEqual(result.compute(), self.func_result) + def test_rechunk(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (2, 2))) cube, _ = create_mock_cube(lazy_array) diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py index 157780dcae..0abef01e41 100644 --- a/lib/iris/tests/unit/plot/test__fixup_dates.py +++ b/lib/iris/tests/unit/plot/test__fixup_dates.py @@ -19,10 +19,11 @@ class Test(tests.IrisTest): - def test_gregorian_calendar(self): - unit = Unit("hours since 2000-04-13 00:00:00", calendar="gregorian") + def test_standard_calendar(self): + unit = Unit("hours since 2000-04-13 00:00:00", calendar="standard") coord = AuxCoord([1, 3, 6], "time", units=unit) result = _fixup_dates(coord, coord.points) + self.assertIsInstance(result[0], datetime.datetime) expected = [ datetime.datetime(2000, 4, 13, 1), datetime.datetime(2000, 4, 13, 3), @@ -30,10 +31,11 @@ def test_gregorian_calendar(self): ] self.assertArrayEqual(result, expected) - def test_gregorian_calendar_sub_second(self): - unit = Unit("seconds since 2000-04-13 00:00:00", calendar="gregorian") + def test_standard_calendar_sub_second(self): + unit = Unit("seconds since 2000-04-13 00:00:00", calendar="standard") coord = AuxCoord([1, 1.25, 1.5], "time", units=unit) result = _fixup_dates(coord, coord.points) + self.assertIsInstance(result[0], datetime.datetime) expected = [ datetime.datetime(2000, 4, 13, 0, 0, 1), datetime.datetime(2000, 4, 13, 0, 0, 1), @@ -52,9 +54,7 @@ def test_360_day_calendar(self): cftime.datetime(2000, 2, 29, calendar=calendar), cftime.datetime(2000, 2, 30, calendar=calendar), ] - self.assertArrayEqual( - [cdt.datetime for cdt in result], expected_datetimes - ) + self.assertArrayEqual(result, expected_datetimes) @tests.skip_nc_time_axis def test_365_day_calendar(self): @@ -67,9 +67,7 @@ def test_365_day_calendar(self): cftime.datetime(2000, 2, 25, 1, 0, calendar=calendar), cftime.datetime(2000, 2, 25, 2, 30, calendar=calendar), ] - self.assertArrayEqual( - [cdt.datetime for cdt in result], expected_datetimes - ) + self.assertArrayEqual(result, expected_datetimes) @tests.skip_nc_time_axis def test_360_day_calendar_attribute(self): diff --git a/lib/iris/tests/unit/plot/test__get_plot_objects.py b/lib/iris/tests/unit/plot/test__get_plot_objects.py new file mode 100644 index 0000000000..8586faa756 --- /dev/null +++ b/lib/iris/tests/unit/plot/test__get_plot_objects.py @@ -0,0 +1,45 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.plot._get_plot_objects` function.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip + +import iris.cube + +if tests.MPL_AVAILABLE: + from iris.plot import _get_plot_objects + + +@tests.skip_plot +class Test__get_plot_objects(tests.IrisTest): + def test_scalar(self): + cube1 = iris.cube.Cube(1) + cube2 = iris.cube.Cube(1) + expected = (cube1, cube2, 1, 1, ()) + result = _get_plot_objects((cube1, cube2)) + self.assertTupleEqual(expected, result) + + def test_mismatched_size_first_scalar(self): + cube1 = iris.cube.Cube(1) + cube2 = iris.cube.Cube([1, 42]) + with self.assertRaisesRegex( + ValueError, "x and y-axis objects are not compatible" + ): + _get_plot_objects((cube1, cube2)) + + def test_mismatched_size_second_scalar(self): + cube1 = iris.cube.Cube(1) + cube2 = iris.cube.Cube([1, 42]) + with self.assertRaisesRegex( + ValueError, "x and y-axis objects are not compatible" + ): + _get_plot_objects((cube2, cube1)) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py new file mode 100644 index 0000000000..c4416c587d --- /dev/null +++ b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py @@ -0,0 +1,45 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.plot._replace_axes_with_cartopy_axes` function.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt + +from iris.plot import _replace_axes_with_cartopy_axes + + +@tests.skip_plot +class Test_replace_axes_with_cartopy_axes(tests.IrisTest): + def setUp(self): + self.fig = plt.figure() + + def test_preserve_position(self): + position = [0.17, 0.65, 0.2, 0.2] + projection = ccrs.PlateCarree() + + plt.axes(position) + _replace_axes_with_cartopy_axes(projection) + result = plt.gca() + + # The result should be the same as an axes created directly with the projection. + expected = plt.axes(position, projection=projection) + + # get_position returns an mpl.transforms.Bbox object, for which equality does + # not appear to be implemented. Compare the bounds (tuple) instead. + self.assertEqual( + expected.get_position().bounds, result.get_position().bounds + ) + + def tearDown(self): + plt.close(self.fig) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py index 40a932b9e0..21fc8efa73 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py @@ -349,6 +349,20 @@ def test_section_vector_ancils(self): ] self.assertEqual(rep, expected) + def test_section_vector_ancils_length_1(self): + # Check ancillary variables that map to a cube dimension of length 1 + # are not interpreted as scalar ancillary variables.
+ cube = Cube(np.zeros((1, 3)), long_name="name", units=1) + cube.add_ancillary_variable(AncillaryVariable([0], long_name="av1"), 0) + + rep = cube_replines(cube) + expected = [ + "name / (1) (-- : 1; -- : 3)", + " Ancillary variables:", + " av1 x -", + ] + self.assertEqual(rep, expected) + def test_section_vector_cell_measures(self): cube = Cube(np.zeros((2, 3)), long_name="name", units=1) cube.add_cell_measure(CellMeasure([0, 1, 2], long_name="cm"), 1) @@ -361,6 +375,20 @@ def test_section_vector_cell_measures(self): ] self.assertEqual(rep, expected) + def test_section_vector_cell_measures_length_1(self): + # Check cell measures that map to a cube dimension of length 1 are not + # interpreted as scalar cell measures. + cube = Cube(np.zeros((2, 1)), long_name="name", units=1) + cube.add_cell_measure(CellMeasure([0], long_name="cm"), 1) + + rep = cube_replines(cube) + expected = [ + "name / (1) (-- : 2; -- : 1)", + " Cell measures:", + " cm - x", + ] + self.assertEqual(rep, expected) + def test_section_scalar_coords(self): # incl points + bounds # TODO: ought to incorporate coord-based summary @@ -424,8 +452,8 @@ def test_section_scalar_ancillaries(self): rep = cube_replines(cube) expected = [ "name / (1) (-- : 2; -- : 3)", - " Ancillary variables:", - " av - -", + " Scalar ancillary variables:", + " av", ] self.assertEqual(rep, expected) @@ -465,11 +493,11 @@ def test_section_cube_attributes__string_extras(self): " escaped 'escaped\\tstring'", ( " long 'this is very very very " - "very very very very very very very very very very..." + "very very very very very very very very very very ...'" ), ( " long_multi 'multi\\nline, " - "this is very very very very very very very very very very..." + "this is very very very very very very very very very very ...'" ), ] self.assertEqual(rep, expected) @@ -488,7 +516,7 @@ def test_section_cube_attributes__array(self): " array array([1.2, 3.4])", ( " bigarray array([[ 0, 1], [ 2, 3], " - "[ 4, 5], [ 6, 7], [ 8, 9], [10, 11], [12, 13],..." + "[ 4, 5], [ 6, 7], [ 8, 9], [10, 11], [12, 13], ..." 
), ] self.assertEqual(rep, expected) @@ -528,6 +556,9 @@ def test_unstructured_cube(self): " longitude - x", " Auxiliary coordinates:", " mesh_face_aux - x", + " Mesh:", + " name unknown", + " location face", ] self.assertEqual(rep, expected) diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index 3e411c020d..bcf31a016f 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -72,8 +72,10 @@ def test_blank_cube(self): self.assertTrue(vector_section.is_empty()) expected_scalar_sections = [ + "Mesh:", "Scalar coordinates:", "Scalar cell measures:", + "Scalar ancillary variables:", "Cell methods:", "Attributes:", ] @@ -221,7 +223,7 @@ def test_scalar_cube(self): self.assertTrue( all(sect.is_empty() for sect in rep.vector_sections.values()) ) - self.assertEqual(len(rep.scalar_sections), 4) + self.assertEqual(len(rep.scalar_sections), 6) self.assertEqual( len(rep.scalar_sections["Scalar coordinates:"].contents), 1 ) diff --git a/lib/iris/tests/unit/test_Future.py b/lib/iris/tests/unit/test_Future.py index dddc752b6f..f0c161b0c4 100644 --- a/lib/iris/tests/unit/test_Future.py +++ b/lib/iris/tests/unit/test_Future.py @@ -12,6 +12,7 @@ import warnings from iris import Future +import iris._deprecation def patched_future(value=False, deprecated=False, error=False): @@ -45,7 +46,7 @@ def test_valid_setting(self): def test_deprecated_warning(self): future = patched_future(deprecated=True, error=False) msg = "'Future' property 'example_future_flag' is deprecated" - with self.assertWarnsRegexp(msg): + with self.assertWarnsRegex(iris._deprecation.IrisDeprecation, msg): future.example_future_flag = False def test_deprecated_error(self): diff --git a/lib/iris/tests/unit/util/test__mask_array.py b/lib/iris/tests/unit/util/test__mask_array.py new file mode 100644 index 0000000000..91a5aca1b4 --- /dev/null +++ b/lib/iris/tests/unit/util/test__mask_array.py @@ -0,0 +1,173 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Test function :func:`iris.util._mask_array`.""" + +import dask.array as da +import numpy as np +import numpy.ma as ma +import pytest + +import iris._lazy_data +from iris.tests import assert_masked_array_equal +from iris.util import _mask_array + +# Set up some arrays to use throughout the tests. +array_1d = np.arange(4) +masked_arr_1d = ma.array(np.arange(4), mask=[1, 0, 0, 1]) +array_2by3 = np.arange(6).reshape(2, 3) + +# Any masked points on the mask itself should be ignored. So results with mask_1d +# and masked_mask_1d should be the same. +mask_1d = np.array([0, 1, 0, 1]) +masked_mask_1d = ma.array([0, 1, 1, 1], mask=[0, 0, 1, 0]) + +# Expected output depends on whether the input array is masked or not.
+expected1 = ma.array(array_1d, mask=mask_1d) +expected2 = ma.array(array_1d, mask=[1, 1, 0, 1]) +array_choices = [(array_1d, expected1), (masked_arr_1d, expected2)] + + +@pytest.mark.parametrize( + "mask", [mask_1d, masked_mask_1d], ids=["plain-mask", "masked-mask"] +) +@pytest.mark.parametrize("lazy_mask", [False, True], ids=["real", "lazy"]) +@pytest.mark.parametrize( + "array, expected", array_choices, ids=["plain-array", "masked-array"] +) +@pytest.mark.parametrize("lazy_array", [False, True], ids=["real", "lazy"]) +def test_1d_not_in_place(array, mask, expected, lazy_array, lazy_mask): + """ + Basic test for expected behaviour when working not in place with various + array types for input. + + """ + if lazy_array: + array = iris._lazy_data.as_lazy_data(array) + + if lazy_mask: + mask = iris._lazy_data.as_lazy_data(mask) + + result = _mask_array(array, mask) + assert result is not array + + if lazy_array or lazy_mask: + assert iris._lazy_data.is_lazy_data(result) + result = iris._lazy_data.as_concrete_data(result) + + assert_masked_array_equal(expected, result) + + +# 1D in place tests. + + +def test_plain_array_in_place(): + """ + Test we get an informative error when trying to add a mask to a plain numpy + array. + + """ + arr = array_1d + mask = None + with pytest.raises( + TypeError, match="Cannot apply a mask in-place to a plain numpy array." + ): + _mask_array(arr, mask, in_place=True) + + +def test_masked_array_lazy_mask_in_place(): + """ + Test we get an informative error when trying to apply a lazy mask in-place + to a non-lazy array. + + """ + arr = masked_arr_1d + mask = da.from_array([0, 1, 0, 1]) + with pytest.raises( + TypeError, match="Cannot apply lazy mask in-place to a non-lazy array." + ): + _mask_array(arr, mask, in_place=True) + + +@pytest.mark.parametrize( + "mask", [mask_1d, masked_mask_1d], ids=["plain-mask", "masked-mask"] +) +def test_real_masked_array_in_place(mask): + """ + Check expected behaviour for applying masks in-place to a masked array. + + """ + arr = masked_arr_1d.copy() + result = _mask_array(arr, mask, in_place=True) + assert_masked_array_equal(arr, expected2) + # Resolve uses returned value regardless of whether we're working in_place. + assert result is arr + + +def test_lazy_array_in_place(): + """ + Test that in place flag is ignored for lazy arrays, and result is the same + as the not in_place case. + + """ + arr = da.from_array(np.arange(4)) + mask = np.array([0, 1, 0, 1]) + expected_computed = ma.array(range(4), mask=[0, 1, 0, 1]) + # in_place is ignored for lazy array as this is handled by _math_op_common. + result = _mask_array(arr, mask, in_place=True) + assert iris._lazy_data.is_lazy_data(result) + assert_masked_array_equal(result.compute(), expected_computed) + assert result is not arr + + +# Broadcasting tests. 
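The broadcasting cases below lean on ordinary numpy broadcasting of a lower-rank mask onto 2-D data. A small numpy-only sketch of the alignment the tests expect (array names here are illustrative, not from the module); note that numpy.ma itself rejects a shape-mismatched mask outright, so broadcasting it up front is precisely the convenience _mask_array adds:

    import numpy as np
    import numpy.ma as ma

    data = np.arange(6).reshape(2, 3)
    trailing = np.array([0, 1, 0], dtype=bool)  # shape (3,): repeats down the rows
    leading = np.array([[1], [0]], dtype=bool)  # shape (2, 1): repeats along the columns

    # numpy aligns trailing dimensions first, so both masks broadcast
    # cleanly to the (2, 3) data shape.
    print(ma.array(data, mask=np.broadcast_to(trailing, data.shape)))
    print(ma.array(data, mask=np.broadcast_to(leading, data.shape)))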
+ +IN_PLACE_PARAMETRIZE = pytest.mark.parametrize( + "in_place", [False, True], ids=["not-in-place", "in-place"] +) + + +@IN_PLACE_PARAMETRIZE +def test_trailing_mask(in_place): + array = ma.array(array_2by3.copy()) + mask = np.array([0, 1, 0]) + expected = ma.array(array_2by3, mask=[[0, 1, 0], [0, 1, 0]]) + result = _mask_array(array, mask, in_place=in_place) + assert_masked_array_equal(result, expected) + assert result is array if in_place else result is not array + + +@IN_PLACE_PARAMETRIZE +def test_leading_mask(in_place): + arr = ma.masked_array(array_2by3.copy(), mask=[[0, 0, 0], [0, 0, 1]]) + mask = np.array([1, 0]).reshape(2, 1) + expected = ma.array(arr.data, mask=[[1, 1, 1], [0, 0, 1]]) + result = _mask_array(arr, mask, in_place=in_place) + assert_masked_array_equal(result, expected) + assert result is arr if in_place else result is not arr + + +def test_lazy_trailing_mask(): + arr = da.ma.masked_array(array_2by3, mask=[[0, 1, 1], [0, 0, 0]]) + mask = np.array([0, 1, 0]) + expected_computed = ma.array(array_2by3, mask=[[0, 1, 1], [0, 1, 0]]) + result = _mask_array(arr, mask, in_place=False) + assert iris._lazy_data.is_lazy_data(result) + assert_masked_array_equal(result.compute(), expected_computed) + assert result is not arr + + +def test_lazy_leading_mask(): + arr = da.from_array(array_2by3) + mask = da.from_array([0, 1]).reshape(2, 1) + expected_computed = ma.array(array_2by3, mask=[[0, 0, 0], [1, 1, 1]]) + result = _mask_array(arr, mask, in_place=False) + assert iris._lazy_data.is_lazy_data(result) + assert_masked_array_equal(result.compute(), expected_computed) + assert result is not arr + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py index 2d5aaa21f1..0123d0cca5 100644 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ b/lib/iris/tests/unit/util/test_mask_cube.py @@ -9,13 +9,19 @@ # importing anything else. import iris.tests as tests # isort:skip +import pathlib + +import dask.array as da import numpy as np import numpy.ma as ma from iris.tests.stock import ( make_bounds_discontiguous_at_point, sample_2d_latlons, + simple_1d, + simple_2d, ) +import iris.util from iris.util import mask_cube @@ -23,15 +29,32 @@ def full2d_global(): return sample_2d_latlons(transformed=True) -@tests.skip_data -class Test(tests.IrisTest): +class MaskCubeMixin: + def assertOriginalMetadata(self, cube, func): + """ + Check metadata matches that of input cube. func is a string indicating + which function created the original cube. 
+ + """ + reference_dir = pathlib.Path("unit/util/mask_cube") + reference_fname = reference_dir / f"original_cube_{func}.cml" + self.assertCML( + cube, + reference_filename=str(reference_fname), + checksum=False, + ) + + +class TestArrayMask(tests.IrisTest, MaskCubeMixin): + """Tests with mask specified as numpy array.""" + def setUp(self): # Set up a 2d cube with a masked discontiguity to test masking # of 2-dimensional cubes self.cube_2d = full2d_global() make_bounds_discontiguous_at_point(self.cube_2d, 3, 3) - def test_mask_cube_2d(self): + def test_mask_cube_2d_in_place(self): # This tests the masking of a 2d data array cube = self.cube_2d discontiguity_array = ma.getmaskarray(cube.data).copy() @@ -40,9 +63,132 @@ def test_mask_cube_2d(self): # Remove mask so that we can pass an unmasked data set to # mask_discontiguities, and check that it masks the correct point by # comparing with masked data - cube.data.mask = ma.nomask - returned = mask_cube(cube, discontiguity_array) - self.assertTrue(np.all(expected.data.mask == returned.data.mask)) + cube.data = cube.data.data + returned = mask_cube(cube, discontiguity_array, in_place=True) + np.testing.assert_array_equal(expected.data.mask, cube.data.mask) + self.assertOriginalMetadata(cube, "full2d_global") + self.assertIs(returned, None) + + def test_mask_cube_2d_not_in_place(self): + # This tests the masking of a 2d data array + cube = self.cube_2d + discontiguity_array = ma.getmaskarray(cube.data).copy() + expected = cube.copy() + + # Remove mask so that we can pass an unmasked data set to + # mask_discontiguities, and check that it masks the correct point by + # comparing with masked data + cube.data = cube.data.data + returned = mask_cube(cube, discontiguity_array, in_place=False) + np.testing.assert_array_equal(expected.data.mask, returned.data.mask) + self.assertOriginalMetadata(returned, "full2d_global") + self.assertFalse(ma.is_masked(cube.data)) + + def test_mask_cube_lazy_in_place_broadcast(self): + cube = simple_2d() + cube.data = cube.lazy_data() + mask = [0, 1, 1, 0] + returned = mask_cube(cube, mask, in_place=True) + self.assertTrue(cube.has_lazy_data()) + # Touch the data so lazyness status doesn't affect CML check. + cube.data + self.assertOriginalMetadata(cube, "simple_2d") + for subcube in cube.slices("foo"): + # Mask should have been broadcast across "bar" dimension. + np.testing.assert_array_equal(subcube.data.mask, mask) + self.assertIs(returned, None) + + +class TestCoordMask(tests.IrisTest, MaskCubeMixin): + """Tests with mask specified as a Coord.""" + + def setUp(self): + self.cube = simple_2d() + + def test_mask_cube_2d_first_dim(self): + mask_coord = iris.coords.AuxCoord([0, 1, 0], long_name="mask", units=1) + self.cube.add_aux_coord(mask_coord, 0) + + returned = mask_cube(self.cube, mask_coord, in_place=False) + # Remove extra coord so we can check against original metadata. + returned.remove_coord(mask_coord) + self.assertOriginalMetadata(returned, "simple_2d") + for subcube in returned.slices("bar"): + # Mask should have been broadcast across "foo" dimension. + np.testing.assert_array_equal(subcube.data.mask, mask_coord.points) + + def test_mask_cube_2d_second_dim(self): + mask_coord = iris.coords.AuxCoord( + [0, 0, 1, 1], long_name="mask", units=1 + ) + returned = mask_cube(self.cube, mask_coord, in_place=False, dim=1) + self.assertOriginalMetadata(returned, "simple_2d") + for subcube in returned.slices("foo"): + # Mask should have been broadcast across "bar" dimension. 
+ np.testing.assert_array_equal(subcube.data.mask, mask_coord.points) + + +class TestCubeMask(tests.IrisTest, MaskCubeMixin): + """Tests with mask specified as a Cube.""" + + def setUp(self): + self.cube = simple_2d() + + def test_mask_cube_2d_first_dim_not_in_place(self): + mask = iris.cube.Cube([0, 1, 0], long_name="mask", units=1) + mask.add_dim_coord(self.cube.coord("bar"), 0) + + returned = mask_cube(self.cube, mask, in_place=False) + self.assertOriginalMetadata(returned, "simple_2d") + for subcube in returned.slices("bar"): + # Mask should have been broadcast across 'foo' dimension. + np.testing.assert_array_equal(subcube.data.mask, mask.data) + + def test_mask_cube_2d_first_dim_in_place(self): + mask = iris.cube.Cube([0, 1, 0], long_name="mask", units=1) + mask.add_dim_coord(self.cube.coord("bar"), 0) + + returned = mask_cube(self.cube, mask, in_place=True) + self.assertOriginalMetadata(self.cube, "simple_2d") + for subcube in self.cube.slices("bar"): + # Mask should have been broadcast across 'foo' dimension. + np.testing.assert_array_equal(subcube.data.mask, mask.data) + self.assertIs(returned, None) + + def test_mask_cube_2d_create_new_dim(self): + mask = iris.cube.Cube( + [[0, 1, 0], [0, 0, 1]], long_name="mask", units=1 + ) + + broadcast_coord = iris.coords.DimCoord([1, 2], long_name="baz") + mask.add_dim_coord(broadcast_coord, 0) + mask.add_dim_coord(self.cube.coord("bar"), 1) + + # Create length-1 dimension to enable broadcasting. + self.cube.add_aux_coord(broadcast_coord[0]) + cube = iris.util.new_axis(self.cube, "baz") + + returned = mask_cube(cube, mask, in_place=False) + self.assertCML(cube, checksum=False) + + for subcube in returned.slices_over("baz"): + # Underlying data should have been broadcast across 'baz' dimension. + np.testing.assert_array_equal(subcube.data, self.cube.data) + + for subcube in returned.slices_over("foo"): + # Mask should have been broadcast across 'foo' dimension. + np.testing.assert_array_equal(subcube.data.mask, mask.data) + + def test_mask_cube_1d_lazy_mask_in_place(self): + cube = simple_1d() + mask = cube.copy(da.from_array([0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1])) + returned = mask_cube(cube, mask, in_place=True) + self.assertIs(returned, None) + self.assertTrue(cube.has_lazy_data()) + # Touch the data so laziness status doesn't interfere with CML check. + cube.data + self.assertOriginalMetadata(cube, "simple_1d") + np.testing.assert_array_equal(cube.data.mask, mask.data) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index 74b59cc7ec..d81f2c40d7 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -7,95 +7,131 @@ # Import iris.tests first so that some things can be initialised before # importing anything else.
-import iris.tests as tests # isort:skip +# isort: off +import iris.tests as tests # noqa + +# isort: on import copy -import unittest import numpy as np +import pytest import iris from iris._lazy_data import as_lazy_data +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord +from iris.cube import Cube import iris.tests.stock as stock from iris.util import new_axis -class Test(tests.IrisTest): - def setUp(self): - self.data = np.array([[1, 2], [1, 2]]) - self.cube = iris.cube.Cube(self.data) - lat = iris.coords.DimCoord([1, 2], standard_name="latitude") - lon = iris.coords.DimCoord([1, 2], standard_name="longitude") - +class Test: + @pytest.fixture + def stock_cube(self): + cube = stock.simple_2d_w_cell_measure_ancil_var() time = iris.coords.DimCoord([1], standard_name="time") - wibble = iris.coords.AuxCoord([1], long_name="wibble") - - self.cube.add_dim_coord(lat, 0) - self.cube.add_dim_coord(lon, 1) - self.cube.add_aux_coord(time, None) - self.cube.add_aux_coord(wibble, None) - - self.coords = {"lat": lat, "lon": lon, "time": time, "wibble": wibble} + cube.add_aux_coord(time, None) + cube.coord("wibble").bounds = np.array([0, 2]).reshape((1, 2)) + return cube def _assert_cube_notis(self, cube_a, cube_b): + assert cube_a.metadata is not cube_b.metadata + for coord_a, coord_b in zip(cube_a.coords(), cube_b.coords()): - self.assertIsNot(coord_a, coord_b) + assert coord_a is not coord_b + + for av_a, av_b in zip( + cube_a.ancillary_variables(), cube_b.ancillary_variables() + ): + assert av_a is not av_b - self.assertIsNot(cube_a.metadata, cube_b.metadata) + for cm_a, cm_b in zip(cube_a.cell_measures(), cube_b.cell_measures()): + assert cm_a is not cm_b for factory_a, factory_b in zip( cube_a.aux_factories, cube_b.aux_factories ): - self.assertIsNot(factory_a, factory_b) + assert factory_a is not factory_b - def test_no_coord(self): + def test_promote_no_coord(self, stock_cube): # Providing no coordinate to promote. - res = new_axis(self.cube) - com = iris.cube.Cube(self.data[None]) - com.add_dim_coord(self.coords["lat"].copy(), 1) - com.add_dim_coord(self.coords["lon"].copy(), 2) - com.add_aux_coord(self.coords["time"].copy(), None) - com.add_aux_coord(self.coords["wibble"].copy(), None) + result = new_axis(stock_cube) + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), None) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), None) + expected.add_ancillary_variable( + stock_cube.ancillary_variable("quality_flag"), 1 + ) + expected.add_cell_measure(stock_cube.cell_measure("cell_area"), (1, 2)) - self.assertEqual(res, com) - self._assert_cube_notis(res, self.cube) + assert result == expected + self._assert_cube_notis(result, stock_cube) - def test_scalar_dimcoord(self): + def test_promote_scalar_dimcoord(self, stock_cube): # Providing a scalar coordinate to promote. 
- res = new_axis(self.cube, "time") - com = iris.cube.Cube(self.data[None]) - com.add_dim_coord(self.coords["lat"].copy(), 1) - com.add_dim_coord(self.coords["lon"].copy(), 2) - com.add_aux_coord(self.coords["time"].copy(), 0) - com.add_aux_coord(self.coords["wibble"].copy(), None) + result = new_axis(stock_cube, "time") + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), 0) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), None) + expected.add_ancillary_variable( + stock_cube.ancillary_variable("quality_flag"), 1 + ) + expected.add_cell_measure(stock_cube.cell_measure("cell_area"), (1, 2)) - self.assertEqual(res, com) - self._assert_cube_notis(res, self.cube) + assert result == expected + # Explicitly check time has been made a cube dim coord as cube equality + # does not check this. + assert result.coord("time") in [ + item[0] for item in result._dim_coords_and_dims + ] + self._assert_cube_notis(result, stock_cube) - def test_scalar_auxcoord(self): + def test_promote_scalar_auxcoord(self, stock_cube): # Providing a scalar coordinate to promote. - res = new_axis(self.cube, "wibble") - com = iris.cube.Cube(self.data[None]) - com.add_dim_coord(self.coords["lat"].copy(), 1) - com.add_dim_coord(self.coords["lon"].copy(), 2) - com.add_aux_coord(self.coords["time"].copy(), None) - com.add_aux_coord(self.coords["wibble"].copy(), 0) + result = new_axis(stock_cube, "wibble") + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), None) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), 0) + expected.add_ancillary_variable( + stock_cube.ancillary_variable("quality_flag"), 1 + ) + expected.add_cell_measure(stock_cube.cell_measure("cell_area"), (1, 2)) + + assert result == expected + # Explicitly check wibble has been made a cube dim coord as cube + # equality does not check this. + assert result.coord("wibble") in [ + item[0] for item in result._dim_coords_and_dims + ] + self._assert_cube_notis(result, stock_cube) - self.assertEqual(res, com) - self._assert_cube_notis(res, self.cube) + def test_promote_non_scalar(self, stock_cube): + # Provide a dimensional coordinate which is not scalar + with pytest.raises(ValueError, match="is not a scalar coordinate."): + new_axis(stock_cube, "foo") def test_maint_factory(self): # Ensure that aux factory persists. 
data = np.arange(12, dtype="i8").reshape((3, 4)) - orography = iris.coords.AuxCoord( + orography = AuxCoord( [10, 25, 50, 5], standard_name="surface_altitude", units="m" ) - model_level = iris.coords.AuxCoord( - [2, 1, 0], standard_name="model_level_number" - ) + model_level = AuxCoord([2, 1, 0], standard_name="model_level_number") - level_height = iris.coords.DimCoord( + level_height = DimCoord( [100, 50, 10], long_name="level_height", units="m", @@ -103,7 +139,7 @@ def test_maint_factory(self): bounds=[[150, 75], [75, 20], [20, 0]], ) - sigma = iris.coords.AuxCoord( + sigma = AuxCoord( [0.8, 0.9, 0.95], long_name="sigma", bounds=[[0.7, 0.85], [0.85, 0.97], [0.97, 1.0]], @@ -113,7 +149,7 @@ def test_maint_factory(self): level_height, sigma, orography ) - cube = iris.cube.Cube( + cube = Cube( data, standard_name="air_temperature", units="K", @@ -122,7 +158,7 @@ def test_maint_factory(self): aux_factories=[hybrid_height], ) - com = iris.cube.Cube( + com = Cube( data[None], standard_name="air_temperature", units="K", @@ -136,7 +172,7 @@ def test_maint_factory(self): ) res = new_axis(cube) - self.assertEqual(res, com) + assert res == com self._assert_cube_notis(res, cube) # Check that factory dependencies are actual coords within the cube. @@ -145,23 +181,14 @@ def test_maint_factory(self): deps = factory.dependencies for dep_name, dep_coord in deps.items(): coord_name = dep_coord.name() - msg = ( - "Factory dependency {!r} is a coord named {!r}, " - "but it is *not* the coord of that name in the new cube." - ) - self.assertIs( - dep_coord, - res.coord(coord_name), - msg.format(dep_name, coord_name), - ) - - def test_lazy_data(self): - cube = iris.cube.Cube(as_lazy_data(self.data)) - cube.add_aux_coord(iris.coords.DimCoord([1], standard_name="time")) - res = new_axis(cube, "time") - self.assertTrue(cube.has_lazy_data()) - self.assertTrue(res.has_lazy_data()) - self.assertEqual(res.shape, (1,) + cube.shape) + assert dep_coord is res.coord(coord_name) + + def test_lazy_cube_data(self, stock_cube): + stock_cube.data = as_lazy_data(stock_cube.data) + res = new_axis(stock_cube) + assert stock_cube.has_lazy_data() + assert res.has_lazy_data() + assert res.shape == (1,) + stock_cube.shape def test_masked_unit_array(self): cube = stock.simple_3d_mask() @@ -170,8 +197,114 @@ def test_masked_unit_array(self): test_cube = new_axis(test_cube, "latitude") data_shape = test_cube.data.shape mask_shape = test_cube.data.mask.shape - self.assertEqual(data_shape, mask_shape) + assert data_shape == mask_shape + + def test_expand_scalar_coord(self, stock_cube): + result = new_axis(stock_cube, "time", expand_extras=["wibble"]) + + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), 0) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), 0) + expected.add_ancillary_variable( + stock_cube.ancillary_variable("quality_flag"), 1 + ) + expected.add_cell_measure(stock_cube.cell_measure("cell_area"), (1, 2)) + + assert result == expected + self._assert_cube_notis(result, stock_cube) + + def test_expand_scalar_coord_lazy_points(self, stock_cube): + stock_cube.coord("wibble").points = as_lazy_data( + stock_cube.coord("wibble").points + ) + result = new_axis(stock_cube, "time", expand_extras=["wibble"]) + assert stock_cube.coord("wibble").has_lazy_points() + assert result.coord("wibble").has_lazy_points() + 
assert ( + result.coord("wibble").points.shape + == stock_cube.coord("wibble").points.shape + ) + + def test_expand_scalar_coord_lazy_bounds(self, stock_cube): + stock_cube.coord("wibble").bounds = as_lazy_data(np.array([[0, 2]])) + result = new_axis(stock_cube, "time", expand_extras=["wibble"]) + assert stock_cube.coord("wibble").has_lazy_bounds() + assert result.coord("wibble").has_lazy_bounds() + assert ( + result.coord("wibble").bounds.shape + == stock_cube.coord("wibble").bounds.shape + ) + + def test_expand_cell_measure(self, stock_cube): + result = new_axis(stock_cube, "time", expand_extras=["cell_area"]) + + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), 0) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), None) + expected.add_ancillary_variable( + stock_cube.ancillary_variable("quality_flag"), 1 + ) + + expected_cm = CellMeasure( + stock_cube.cell_measure("cell_area").data[None], + standard_name="cell_area", + ) + expected.add_cell_measure(expected_cm, (0, 1, 2)) + assert result == expected + self._assert_cube_notis(result, stock_cube) + + def test_expand_ancil_var(self, stock_cube): + result = new_axis(stock_cube, "time", expand_extras=["quality_flag"]) + + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), 0) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), None) + expected.add_cell_measure(stock_cube.cell_measure("cell_area"), (1, 2)) + + expected_av = AncillaryVariable( + stock_cube.ancillary_variable("quality_flag").data[None], + standard_name="quality_flag", + ) + + expected.add_ancillary_variable(expected_av, (0, 1)) + + assert result == expected + self._assert_cube_notis(result, stock_cube) + + def test_expand_multiple(self, stock_cube): + result = new_axis( + stock_cube, "time", expand_extras=["wibble", "cell_area"] + ) + + expected = iris.cube.Cube( + stock_cube.data[None], long_name="thingness", units="1" + ) + expected.add_dim_coord(stock_cube.coord("bar").copy(), 1) + expected.add_dim_coord(stock_cube.coord("foo").copy(), 2) + expected.add_aux_coord(stock_cube.coord("time").copy(), 0) + expected.add_aux_coord(stock_cube.coord("wibble").copy(), 0) + expected.add_ancillary_variable( + stock_cube.ancillary_variable("quality_flag"), 1 + ) + + expected_cm = CellMeasure( + stock_cube.cell_measure("cell_area").data[None], + standard_name="cell_area", + ) + expected.add_cell_measure(expected_cm, (0, 1, 2)) -if __name__ == "__main__": - unittest.main() + assert result == expected + self._assert_cube_notis(result, stock_cube) diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py index 1efc73700b..7d9a669a9d 100644 --- a/lib/iris/tests/unit/util/test_reverse.py +++ b/lib/iris/tests/unit/util/test_reverse.py @@ -76,10 +76,15 @@ def setUp(self): # matching long names but the points array on one cube is reversed # with respect to that on the other. 
data = np.arange(12).reshape(3, 4) + self.a1 = iris.coords.DimCoord([1, 2, 3], long_name="a") + self.a1.guess_bounds() self.b1 = iris.coords.DimCoord([1, 2, 3, 4], long_name="b") + a2 = iris.coords.DimCoord([3, 2, 1], long_name="a") + a2.guess_bounds() b2 = iris.coords.DimCoord([4, 3, 2, 1], long_name="b") + self.span = iris.coords.AuxCoord( np.arange(12).reshape(3, 4), long_name="spanning" ) @@ -94,85 +99,93 @@ def setUp(self): data, dim_coords_and_dims=[(a2, 0), (b2, 1)] ) - def test_cube_dim(self): - cube1_reverse0 = reverse(self.cube1, 0) - cube1_reverse1 = reverse(self.cube1, 1) - cube1_reverse_both = reverse(self.cube1, (0, 1)) - - self.assertArrayEqual(self.cube1.data[::-1], cube1_reverse0.data) + def check_coorda_reversed(self, result): self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse0.coord("a").points + self.cube2.coord("a").points, result.coord("a").points ) self.assertArrayEqual( - self.cube1.coord("b").points, cube1_reverse0.coord("b").points + self.cube2.coord("a").bounds, result.coord("a").bounds ) - self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) + def check_coorda_unchanged(self, result): self.assertArrayEqual( - self.cube1.coord("a").points, cube1_reverse1.coord("a").points + self.cube1.coord("a").points, result.coord("a").points ) self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse1.coord("b").points + self.cube1.coord("a").bounds, result.coord("a").bounds ) + def check_coordb_reversed(self, result): self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_both.data + self.cube2.coord("b").points, result.coord("b").points ) + + def check_coordb_unchanged(self, result): self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse_both.coord("a").points + self.cube1.coord("b").points, result.coord("b").points ) + + def test_cube_dim0(self): + cube1_reverse0 = reverse(self.cube1, 0) + + self.assertArrayEqual(self.cube1.data[::-1], cube1_reverse0.data) + self.check_coorda_reversed(cube1_reverse0) + self.check_coordb_unchanged(cube1_reverse0) + + def test_cube_dim1(self): + cube1_reverse1 = reverse(self.cube1, 1) + + self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) + self.check_coordb_reversed(cube1_reverse1) + self.check_coorda_unchanged(cube1_reverse1) + + def test_cube_dim_both(self): + cube1_reverse_both = reverse(self.cube1, (0, 1)) + self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse_both.coord("b").points + self.cube1.data[::-1, ::-1], cube1_reverse_both.data ) + self.check_coorda_reversed(cube1_reverse_both) + self.check_coordb_reversed(cube1_reverse_both) - def test_cube_coord(self): + def test_cube_coord0(self): cube1_reverse0 = reverse(self.cube1, self.a1) - cube1_reverse1 = reverse(self.cube1, "b") - cube1_reverse_both = reverse(self.cube1, (self.a1, self.b1)) - cube1_reverse_spanning = reverse(self.cube1, "spanning") self.assertArrayEqual(self.cube1.data[::-1], cube1_reverse0.data) - self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse0.coord("a").points - ) - self.assertArrayEqual( - self.cube1.coord("b").points, cube1_reverse0.coord("b").points - ) + self.check_coorda_reversed(cube1_reverse0) + self.check_coordb_unchanged(cube1_reverse0) + + def test_cube_coord1(self): + cube1_reverse1 = reverse(self.cube1, "b") self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) - self.assertArrayEqual( - self.cube1.coord("a").points, cube1_reverse1.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("b").points, 
cube1_reverse1.coord("b").points - ) + self.check_coordb_reversed(cube1_reverse1) + self.check_coorda_unchanged(cube1_reverse1) + + def test_cube_coord_both(self): + cube1_reverse_both = reverse(self.cube1, (self.a1, self.b1)) self.assertArrayEqual( self.cube1.data[::-1, ::-1], cube1_reverse_both.data ) - self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse_both.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse_both.coord("b").points - ) + self.check_coorda_reversed(cube1_reverse_both) + self.check_coordb_reversed(cube1_reverse_both) + + def test_cube_coord_spanning(self): + cube1_reverse_spanning = reverse(self.cube1, "spanning") self.assertArrayEqual( self.cube1.data[::-1, ::-1], cube1_reverse_spanning.data ) - self.assertArrayEqual( - self.cube2.coord("a").points, - cube1_reverse_spanning.coord("a").points, - ) - self.assertArrayEqual( - self.cube2.coord("b").points, - cube1_reverse_spanning.coord("b").points, - ) + self.check_coorda_reversed(cube1_reverse_spanning) + self.check_coordb_reversed(cube1_reverse_spanning) + self.assertArrayEqual( self.span.points[::-1, ::-1], cube1_reverse_spanning.coord("spanning").points, ) + def test_wrong_coord_name(self): msg = ( "Expected to find exactly 1 'latitude' coordinate, but found none." ) @@ -181,10 +194,12 @@ def test_cube_coord(self): ): reverse(self.cube1, "latitude") + def test_empty_list(self): msg = "Reverse was expecting a single axis or a 1d array *" with self.assertRaisesRegex(ValueError, msg): reverse(self.cube1, []) + def test_wrong_type_cube(self): msg = ( "coords_or_dims must be int, str, coordinate or sequence of " "these. Got cube." @@ -192,6 +207,7 @@ def test_cube_coord(self): with self.assertRaisesRegex(TypeError, msg): reverse(self.cube1, self.cube1) + def test_wrong_type_float(self): msg = ( "coords_or_dims must be int, str, coordinate or sequence of " "these." diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py index 16dc7054f3..daf71890b1 100644 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ b/lib/iris/tests/unit/util/test_unify_time_units.py @@ -20,7 +20,7 @@ class Test(tests.IrisTest): - def simple_1d_time_cubes(self, calendar="gregorian"): + def simple_1d_time_cubes(self, calendar="standard"): coord_points = [1, 2, 3, 4, 5] data_points = [273, 275, 278, 277, 274] reftimes = [ @@ -92,7 +92,7 @@ def test_time_coord_only_in_some_cubes(self): def test_multiple_time_coords_in_cube(self): cube0, cube1 = self.simple_1d_time_cubes() units = cf_units.Unit( - "days since 1980-05-02 00:00:00", calendar="gregorian" + "days since 1980-05-02 00:00:00", calendar="standard" ) aux_coord = iris.coords.AuxCoord( 72, standard_name="forecast_reference_time", units=units diff --git a/lib/iris/util.py b/lib/iris/util.py index 53cd78724e..3d82ea68c5 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -24,6 +24,8 @@ from iris._deprecation import warn_deprecated from iris._lazy_data import as_concrete_data, is_lazy_data +from iris.common import SERVICES +from iris.common.lenient import _lenient_client import iris.exceptions @@ -1094,7 +1096,7 @@ def format_array(arr): return result -def new_axis(src_cube, scalar_coord=None): +def new_axis(src_cube, scalar_coord=None, expand_extras=()): """ Create a new axis as the leading dimension of the cube, promoting a scalar coordinate if specified. 
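A minimal sketch of what the extended signature enables - the cube and coordinate names here are invented for illustration, and the behaviour follows the docstring in the hunk below::

    import numpy as np
    import iris.coords
    import iris.cube
    from iris.util import new_axis

    # A 2D cube with two scalar (length-1) coordinates.
    cube = iris.cube.Cube(np.zeros((3, 4)), long_name="thingness")
    cube.add_aux_coord(iris.coords.DimCoord([0], standard_name="time"))
    cube.add_aux_coord(iris.coords.AuxCoord([1], long_name="wibble"))

    result = new_axis(cube, "time", expand_extras=["wibble"])
    print(result.shape)  # (1, 3, 4): "time" is now a length-1 leading dimension.
    # "wibble" is also mapped to the new leading dimension, rather than
    # remaining an unmapped scalar coordinate.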
@@ -1109,9 +1111,16 @@ def new_axis(src_cube, scalar_coord=None): * scalar_coord (:class:`iris.coord.Coord` or 'string') Scalar coordinate to promote to a dimension coordinate. + * expand_extras (iterable) + Auxiliary coordinates, ancillary variables and cell measures which will + be expanded so that they map to the new dimension as well as the + existing dimensions. + Returns: A new :class:`iris.cube.Cube` instance with one extra leading dimension - (length 1). + (length 1). Chosen auxiliary coordinates, cell measures and ancillary + variables will also be given an additional dimension, associated with + the leading dimension of the cube. For example:: @@ -1120,40 +1129,83 @@ def new_axis(src_cube, scalar_coord=None): >>> ncube = iris.util.new_axis(cube, 'time') >>> ncube.shape (1, 360, 360) - """ - from iris.coords import DimCoord - from iris.cube import Cube + + def _reshape_data_array(data_manager): + # Indexing numpy arrays requires loading deferred data here, returning a + # copy of the data with a new leading dimension. + # If the data of the source cube (or values of the dimensional metadata + # object) is a Masked Constant, it is changed here to a Masked Array to + # allow the mask to gain an extra dimension with the data. + if data_manager.has_lazy_data(): + new_data = data_manager.lazy_data()[None] + else: + if isinstance(data_manager.data, ma.core.MaskedConstant): + new_data = ma.array([np.nan], mask=[True]) + else: + new_data = data_manager.data[None] + return new_data + + def _handle_dimensional_metadata( + cube, dm_item, cube_add_method, expand_extras + ): + cube_dims = dm_item.cube_dims(cube) + if dm_item in expand_extras: + if cube_dims == (): + new_dm_item, new_dims = dm_item.copy(), 0 + else: + new_dims = np.concatenate([(0,), np.array(cube_dims) + 1]) + new_values = _reshape_data_array(dm_item._values_dm) + kwargs = dm_item.metadata._asdict() + new_dm_item = dm_item.__class__(new_values, **kwargs) + try: + if dm_item.has_bounds(): + new_dm_item.bounds = _reshape_data_array( + dm_item._bounds_dm + ) + except AttributeError: + pass + else: + new_dims = np.array(cube_dims) + 1 + new_dm_item = dm_item.copy() + + cube_add_method(new_dm_item, new_dims) if scalar_coord is not None: scalar_coord = src_cube.coord(scalar_coord) + if scalar_coord.shape != (1,): + emsg = scalar_coord.name() + " is not a scalar coordinate." + raise ValueError(emsg) - # Indexing numpy arrays requires loading deferred data here returning a - # copy of the data with a new leading dimension. - # If the source cube is a Masked Constant, it is changed here to a Masked - # Array to allow the mask to gain an extra dimension with the data.
- if src_cube.has_lazy_data(): - new_cube = Cube(src_cube.lazy_data()[None]) - else: - if isinstance(src_cube.data, ma.core.MaskedConstant): - new_data = ma.array([np.nan], mask=[True]) - else: - new_data = src_cube.data[None] - new_cube = Cube(new_data) + expand_extras = [ + src_cube._dimensional_metadata(item) for item in expand_extras + ] + new_cube = iris.cube.Cube(_reshape_data_array(src_cube._data_manager)) new_cube.metadata = src_cube.metadata + for coord in src_cube.dim_coords: + coord_dims = np.array(src_cube.coord_dims(coord)) + 1 + new_cube.add_dim_coord(coord.copy(), coord_dims) + for coord in src_cube.aux_coords: if scalar_coord and scalar_coord == coord: - dim_coord = DimCoord.from_coord(coord) + dim_coord = iris.coords.DimCoord.from_coord(coord) new_cube.add_dim_coord(dim_coord, 0) else: - dims = np.array(src_cube.coord_dims(coord)) + 1 - new_cube.add_aux_coord(coord.copy(), dims) + _handle_dimensional_metadata( + src_cube, coord, new_cube.add_aux_coord, expand_extras + ) - for coord in src_cube.dim_coords: - coord_dims = np.array(src_cube.coord_dims(coord)) + 1 - new_cube.add_dim_coord(coord.copy(), coord_dims) + for cm in src_cube.cell_measures(): + _handle_dimensional_metadata( + src_cube, cm, new_cube.add_cell_measure, expand_extras + ) + + for av in src_cube.ancillary_variables(): + _handle_dimensional_metadata( + src_cube, av, new_cube.add_ancillary_variable, expand_extras + ) nonderived_coords = src_cube.dim_coords + src_cube.aux_coords coord_mapping = { @@ -1281,6 +1333,32 @@ def regular_step(coord): return avdiff.astype(coord.points.dtype) +def regular_points(zeroth, step, count): + """Make an array of regular points. + + Create an array of `count` points, starting at `zeroth` + `step` and adding + `step` each time. The points are float32 if this gives a sufficiently + regular array (as tested with `points_step`), and float64 if not. + + Parameters + ---------- + zeroth : number + The value *prior* to the first point value. + + step : number + The numeric difference between successive point values. + + count : number + The number of point values. + + """ + points = (zeroth + step) + step * np.arange(count, dtype=np.float32) + _, regular = iris.util.points_step(points) + if not regular: + points = (zeroth + step) + step * np.arange(count, dtype=np.float64) + return points + + def points_step(points): """Determine whether `points` has a regular step. @@ -1728,29 +1806,123 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): return bad_points_boolean -def mask_cube(cube, points_to_mask): +def _mask_array(array, points_to_mask, in_place=False): """ - Masks any cells in the data array which correspond to cells marked `True` - in the `points_to_mask` array. + Apply masking to array where points_to_mask is True/non-zero. Designed to + work with iris.analysis.maths._binary_op_common so array and points_to_mask + will be broadcastable to each other. array and points_to_mask may be numpy + or dask types (or one of each). - Args: + If array is lazy then in_place is ignored: _math_op_common will use the + returned value regardless of in_place, so we do not need to implement it + here. If in_place is True then array must be a np.ma.MaskedArray or dask + array (must be a dask array if points_to_mask is lazy). - * cube (`iris.cube.Cube`): - A 2-dimensional instance of :class:`iris.cube.Cube`. + """ + # Decide which array library to use.
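+ # ``al`` is bound to dask.array when either input is lazy, and to numpy + # otherwise; the masking steps below are written once against this alias.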
+ if is_lazy_data(points_to_mask) or is_lazy_data(array): + al = da + if not is_lazy_data(array) and in_place: + # Non-lazy array and lazy mask should not come up for in_place + # case, due to _binary_op_common handling added at #3790. + raise TypeError( + "Cannot apply lazy mask in-place to a non-lazy array." + ) + in_place = False - * points_to_mask (`numpy.ndarray` of bool): - A 2d boolean array of Truth values representing points to mask in the - x and y arrays of the cube. + elif in_place and not isinstance(array, ma.MaskedArray): + raise TypeError("Cannot apply a mask in-place to a plain numpy array.") + else: + al = np - Returns: + points_to_mask = points_to_mask.astype(bool) + + # Treat any masked points on our mask as False. + points_to_mask = al.ma.filled(points_to_mask, False) + + # Get broadcasted views of the arrays. Note that broadcast_arrays does not + # preserve masks, so we need to explicitly handle any existing mask on array. + array_mask = al.ma.getmaskarray(array) + + array_data, array_mask, points_to_mask = al.broadcast_arrays( + array, array_mask, points_to_mask + ) + + new_mask = al.logical_or(array_mask, points_to_mask) + + if in_place: + array.mask = new_mask + result = array # Resolve uses returned value even if working in place. + else: + # Return a new, independent array. + result = al.ma.masked_array(array_data.copy(), mask=new_mask) + + return result + + +@_lenient_client(services=SERVICES) +def mask_cube(cube, points_to_mask, in_place=False, dim=None): + """ + Masks any cells in the cube's data array which correspond to cells marked + ``True`` (or non-zero) in ``points_to_mask``. ``points_to_mask`` may be + specified as a :class:`numpy.ndarray`, :class:`iris.coords.Coord` or + :class:`iris.cube.Cube`, following the same broadcasting approach as cube + arithmetic (see :ref:`cube maths`). + + Parameters + ---------- + + cube : iris.cube.Cube + Cube containing data that requires masking. - * result (`iris.cube.Cube`): - A cube whose data array is masked at points specified by input array. + points_to_mask : numpy.ndarray, iris.coords.Coord or iris.cube.Cube + Specifies booleans (or ones and zeros) indicating which points will be masked. + + in_place : bool, default=False + If `True`, masking is applied to the input cube. Otherwise a copy is masked + and returned. + + dim : int, optional + If `points_to_mask` is a coord which does not exist on the cube, specify the + dimension to which it should be mapped. + + Returns + ------- + + iris.cube.Cube + A cube whose data array is masked at points specified by ``points_to_mask``. + + Notes + ----- + + If either ``cube`` or ``points_to_mask`` is lazy, the result will be lazy. """ - cube.data = ma.masked_array(cube.data) - cube.data[points_to_mask] = ma.masked - return cube + if in_place and not cube.has_lazy_data(): + # Ensure cube data is masked type so we can work on it in-place. + cube.data = ma.asanyarray(cube.data) + mask_function = functools.partial(_mask_array, in_place=True) + else: + mask_function = _mask_array + + input_metadata = cube.metadata + result = iris.analysis.maths._binary_op_common( + mask_function, + "mask", + cube, + points_to_mask, + cube.units, + in_place=in_place, + dim=dim, + sanitise_metadata=False, + ) + + # Resolve combines the metadata from the two operands, but we want to + # preserve the metadata from the (first) input cube.
+ result.metadata = input_metadata + + if not in_place: + return result def equalise_attributes(cubes): diff --git a/noxfile.py b/noxfile.py index 8b23948677..8aabf862fb 100755 --- a/noxfile.py +++ b/noxfile.py @@ -5,9 +5,13 @@ """ +from datetime import datetime import hashlib import os from pathlib import Path +import re +from tempfile import NamedTemporaryFile +from typing import Literal import nox from nox.logger import logger @@ -31,9 +35,7 @@ # https://github.com/numpy/numpy/pull/19478 # https://github.com/matplotlib/matplotlib/pull/22099 #: Common session environment variables. -ENV = dict( - NPY_DISABLE_CPU_FEATURES="AVX512F,AVX512CD,AVX512VL,AVX512BW,AVX512DQ,AVX512_SKX" -) +ENV = dict(NPY_DISABLE_CPU_FEATURES="AVX512F,AVX512CD,AVX512_SKX") def session_lockfile(session: nox.sessions.Session) -> Path: @@ -169,41 +171,6 @@ def prepare_venv(session: nox.sessions.Session) -> None: ) -@nox.session -def precommit(session: nox.sessions.Session): - """ - Perform pre-commit hooks of iris codebase. - - Parameters - ---------- - session: object - A `nox.sessions.Session` object. - - """ - import yaml - - # Pip install the session requirements. - session.install("pre-commit") - - # Load the pre-commit configuration YAML file. - with open(".pre-commit-config.yaml", "r") as fi: - config = yaml.load(fi, Loader=yaml.FullLoader) - - # List of pre-commit hook ids that we don't want to run. - excluded = ["no-commit-to-branch"] - - # Enumerate the ids of pre-commit hooks we do want to run. - ids = [ - hook["id"] - for entry in config["repos"] - for hook in entry["hooks"] - if hook["id"] not in excluded - ] - - # Execute the pre-commit hooks. - [session.run("pre-commit", "run", "--all-files", id) for id in ids] - - @nox.session(python=PY_VER, venv_backend="conda") def tests(session: nox.sessions.Session): """ @@ -253,7 +220,22 @@ def doctest(session: nox.sessions.Session): "doctest", external=True, ) - session.cd("..") + + +@nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") +def gallery(session: nox.sessions.Session): + """ + Perform iris gallery doc-tests. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.install("--no-deps", "--editable", ".") + session.env.update(ENV) session.run( "python", "-m", @@ -289,48 +271,264 @@ def linkcheck(session: nox.sessions.Session): ) -@nox.session(python=PY_VER[-1], venv_backend="conda") +@nox.session(python=PY_VER, venv_backend="conda") +def wheel(session: nox.sessions.Session): + """ + Perform iris local wheel install and import test. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.cd("dist") + fname = list(Path(".").glob("scitools_iris-*.whl")) + if len(fname) == 0: + raise ValueError("Cannot find wheel to install.") + if len(fname) > 1: + emsg = ( + f"Expected to find 1 wheel to install, found {len(fname)} instead." 
+ ) + raise ValueError(emsg) + session.install(fname[0].name) + session.run( + "python", + "-c", + "import iris; print(f'{iris.__version__=}')", + external=True, + ) + + +@nox.session @nox.parametrize( - ["ci_mode"], - [True, False], - ids=["ci compare", "full"], + "run_type", + ["overnight", "branch", "cperf", "sperf", "custom"], + ids=["overnight", "branch", "cperf", "sperf", "custom"], ) -def benchmarks(session: nox.sessions.Session, ci_mode: bool): +def benchmarks( + session: nox.sessions.Session, + run_type: Literal["overnight", "branch", "cperf", "sperf", "custom"], +): """ - Perform esmf-regrid performance benchmarks (using Airspeed Velocity). + Perform Iris performance benchmarks (using Airspeed Velocity). + + All run types require a single Nox positional argument (e.g. + ``nox --session="foo" -- my_pos_arg``) - detailed in the parameters + section - and can optionally accept a series of further arguments that will + be added to the session's ASV command. Parameters ---------- session: object A `nox.sessions.Session` object. - ci_mode: bool - Run a cut-down selection of benchmarks, comparing the current commit to - the last commit for performance regressions. - - Notes - ----- - ASV is set up to use ``nox --session=tests --install-only`` to prepare - the benchmarking environment. This session environment must use a Python - version that is also available for ``--session=tests``. + run_type: {"overnight", "branch", "cperf", "sperf", "custom"} + * ``overnight``: benchmarks all commits between the input **first + commit** and ``HEAD``, comparing each to its parent for performance + shifts. If a commit causes shifts, the output is saved to a file: + ``.asv/performance-shifts/``. Designed for checking the + previous 24 hours' commits, typically in a scheduled script. + * ``branch``: Performs the same operations as ``overnight``, but always + on two commits only - ``HEAD``, and ``HEAD``'s merge-base with the + input **base branch**. Output from this run is never saved to a file. + Designed for testing if the active branch's changes cause performance + shifts - anticipating what would be caught by ``overnight`` once + merged. + **For maximum accuracy, avoid using the machine that is running this + session. Run time could be >1 hour for the full benchmark suite.** + * ``cperf``: Run the on-demand CPerf suite of benchmarks (part of the + UK Met Office NG-VAT project) for the ``HEAD`` of ``upstream/main`` + only, and publish the results to the input **publish directory**, + within a unique subdirectory for this run. + * ``sperf``: As with CPerf, but for the SPerf suite. + * ``custom``: run ASV with the input **ASV sub-command**, without any + preset arguments - these must all be supplied by the user. So just like + running ASV manually, with the convenience of re-using the session's + scripted setup steps. + + Examples + -------- + * ``nox --session="benchmarks(overnight)" -- a1b23d4`` + * ``nox --session="benchmarks(branch)" -- upstream/main`` + * ``nox --session="benchmarks(branch)" -- upstream/mesh-data-model`` + * ``nox --session="benchmarks(branch)" -- upstream/main --bench=regridding`` + * ``nox --session="benchmarks(cperf)" -- my_publish_dir`` + * ``nox --session="benchmarks(custom)" -- continuous a1b23d4 HEAD --quick`` """ + # The threshold beyond which shifts are 'notable'. See ``asv compare`` docs + # for more.
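+ # (A factor of 1.2 flags results that change by more than 20% either way.)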
+ COMPARE_FACTOR = 1.2 + session.install("asv", "nox") + + data_gen_var = "DATA_GEN_PYTHON" + if data_gen_var in os.environ: + print("Using existing data generation environment.") + else: + print("Setting up the data generation environment...") + # Get Nox to build an environment for the `tests` session, but don't + # run the session. Will re-use a cached environment if appropriate. + session.run_always( + "nox", + "--session=tests", + "--install-only", + f"--python={_PY_VERSION_LATEST}", + ) + # Find the environment built above, set it to be the data generation + # environment. + data_gen_python = next( + Path(".nox").rglob(f"tests*/bin/python{_PY_VERSION_LATEST}") + ).resolve() + session.env[data_gen_var] = data_gen_python + + mule_dir = data_gen_python.parents[1] / "resources" / "mule" + if not mule_dir.is_dir(): + print("Installing Mule into data generation environment...") + session.run_always( + "git", + "clone", + "https://github.com/metomi/mule.git", + str(mule_dir), + external=True, + ) + session.run_always( + str(data_gen_python), + "-m", + "pip", + "install", + str(mule_dir / "mule"), + external=True, + ) + + print("Running ASV...") session.cd("benchmarks") # Skip over setup questions for a new machine. session.run("asv", "machine", "--yes") - def asv_exec(*sub_args: str) -> None: - run_args = ["asv", *sub_args] - session.run(*run_args) - - if ci_mode: - # If on a PR: compare to the base (target) branch. - # Else: compare to previous commit. - previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1") - try: - asv_exec("continuous", "--factor=1.2", previous_commit, "HEAD") - finally: - asv_exec("compare", previous_commit, "HEAD") + # All run types require one Nox posarg. + run_type_arg = { + "overnight": "first commit", + "branch": "base branch", + "cperf": "publish directory", + "sperf": "publish directory", + "custom": "ASV sub-command", + } + if run_type not in run_type_arg.keys(): + message = f"Unsupported run-type: {run_type}" + raise NotImplementedError(message) + if not session.posargs: + message = ( + f"Missing mandatory first Nox session posarg: " + f"{run_type_arg[run_type]}" + ) + raise ValueError(message) + first_arg = session.posargs[0] + # Optional extra arguments to be passed down to ASV. + asv_args = session.posargs[1:] + + def asv_compare(*commits): + """Run through a list of commits comparing each one to the next.""" + commits = [commit[:8] for commit in commits] + shifts_dir = Path(".asv") / "performance-shifts" + for i in range(len(commits) - 1): + before = commits[i] + after = commits[i + 1] + asv_command_ = f"asv compare {before} {after} --factor={COMPARE_FACTOR} --split" + session.run(*asv_command_.split(" ")) + + if run_type == "overnight": + # Record performance shifts. + # Run the command again but limited to only showing performance + # shifts. + shifts = session.run( + *asv_command_.split(" "), "--only-changed", silent=True + ) + if shifts: + # Write the shifts report to a file. + # Dir is used by .github/workflows/benchmarks.yml, + # but not cached - intended to be discarded after run. + shifts_dir.mkdir(exist_ok=True, parents=True) + shifts_path = (shifts_dir / after).with_suffix(".txt") + with shifts_path.open("w") as shifts_file: + shifts_file.write(shifts) + + # Common ASV arguments for all run_types except `custom`. + asv_harness = ( + "asv run {posargs} --attribute rounds=4 --interleave-rounds --strict " + "--show-stderr" + ) + + if run_type == "overnight": + first_commit = first_arg + commit_range = f"{first_commit}^^.." 
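+ # The "^^.." suffix extends the range back to the first commit's parent, + # giving ASV a baseline against which to compare the first commit.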
+ asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + # git rev-list --first-parent is the command ASV uses. + git_command = f"git rev-list --first-parent {commit_range}" + commit_string = session.run( + *git_command.split(" "), silent=True, external=True + ) + commit_list = commit_string.rstrip().split("\n") + asv_compare(*reversed(commit_list)) + + elif run_type == "branch": + base_branch = first_arg + git_command = f"git merge-base HEAD {base_branch}" + merge_base = session.run( + *git_command.split(" "), silent=True, external=True + )[:8] + + with NamedTemporaryFile("w") as hashfile: + hashfile.writelines([merge_base, "\n", "HEAD"]) + hashfile.flush() + commit_range = f"HASHFILE:{hashfile.name}" + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + asv_compare(merge_base, "HEAD") + + elif run_type in ("cperf", "sperf"): + publish_dir = Path(first_arg) + if not publish_dir.is_dir(): + message = ( + f"Input 'publish directory' is not a directory: {publish_dir}" + ) + raise NotADirectoryError(message) + publish_subdir = ( + publish_dir + / f"{run_type}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" + ) + publish_subdir.mkdir() + + # Activate on demand benchmarks (C/SPerf are deactivated for 'standard' runs). + session.env["ON_DEMAND_BENCHMARKS"] = "True" + commit_range = "upstream/main^!" + + asv_command = ( + asv_harness.format(posargs=commit_range) + f" --bench={run_type}" + ) + # C/SPerf benchmarks are much bigger than the CI ones: + # Don't fail the whole run if memory blows on 1 benchmark. + asv_command = asv_command.replace(" --strict", "") + # Only do a single round. + asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command) + session.run(*asv_command.split(" "), *asv_args) + + asv_command = f"asv publish {commit_range} --html-dir={publish_subdir}" + session.run(*asv_command.split(" ")) + + # Print completion message. + location = Path().cwd() / ".asv" + print( + f'New ASV results for "{run_type}".\n' + f'See "{publish_subdir}",' + f'\n or JSON files under "{location / "results"}".' + ) + else: - # f5ceb808 = first commit supporting nox --install-only . - asv_exec("run", "f5ceb808..HEAD") + asv_subcommand = first_arg + assert run_type == "custom" + session.run("asv", asv_subcommand, *asv_args) diff --git a/pyproject.toml b/pyproject.toml index 26e6ae727a..bdb8a431e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,17 @@ [build-system] # Defined by PEP 518 requires = [ - "setuptools>=40.8.0", + "setuptools>=64", + "setuptools_scm[toml]>=7.0", "wheel", ] # Defined by PEP 517 build-backend = "setuptools.build_meta" +[tool.setuptools_scm] +write_to = "lib/iris/_version.py" +local_scheme = "dirty-tag" +version_scheme = "release-branch-semver" [tool.black] line-length = 79 @@ -37,3 +42,7 @@ extend_skip = [ ] skip_gitignore = "True" verbose = "False" + +[tool.pytest.ini_options] +addopts = "-ra" +testpaths = "lib/iris" diff --git a/requirements/ci/iris.yml b/requirements/ci/iris.yml index a76932b56e..1e473d36d5 120000 --- a/requirements/ci/iris.yml +++ b/requirements/ci/iris.yml @@ -1 +1 @@ -py38.yml \ No newline at end of file +py310.yml \ No newline at end of file diff --git a/requirements/ci/nox.lock/py310-linux-64.lock b/requirements/ci/nox.lock/py310-linux-64.lock new file mode 100644 index 0000000000..d88fd19a29 --- /dev/null +++ b/requirements/ci/nox.lock/py310-linux-64.lock @@ -0,0 +1,258 @@ +# Generated by conda-lock. 
+# platform: linux-64 +# input_hash: 043088e81c1e979eac04ac622e72d5d9f2c559c9059eae30112aafa081dffa6d +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.6.15-ha878542_0.tar.bz2#c320890f77fd1d617fa876e0982002c2 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.1.0-hdcd56e2_16.tar.bz2#b02605b875559ff99f04351fd5040760 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2#6f5ba041a41eb102a1027d9e68731be7 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf +https://conda.anaconda.org/conda-forge/noarch/tzdata-2022c-h191b570_0.tar.bz2#a56386ad31a7322940dd7d03fb3a9979 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.1.0-h69a702a_16.tar.bz2#6bf15e29a20f614b18ae89368260d0a2 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.1.0-h8d9b700_16.tar.bz2#f013cf7749536ce43d82afbffdf499ab +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.1.0-h8d9b700_16.tar.bz2#4f05bc9844f7c101e6e147dab3c88d5c +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.6.1-h7f98852_0.tar.bz2#0347ce6a34f8b55b544b141432c6d4c7 +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.8-h27087fc_0.tar.bz2#e1b07832504eeba765d648389cc387a9 +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_ha7695d1_103.tar.bz2#a56c5033619bdf56a22a1f0a0fd286aa +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.0-h27087fc_0.tar.bz2#a583d0bc9a85c48e8b07a588d1ac8a80 +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268 
+https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_7.tar.bz2#f82dc1c78bcf73583f2656433ce2933c +https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.13-h166bdaf_0.tar.bz2#4b5bee2e957570197327d0b20a718891 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_2.tar.bz2#839776c4e967bc881c21da197127a3ae +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee +https://conda.anaconda.org/conda-forge/linux-64/libudev1-249-h166bdaf_4.tar.bz2#dc075ff6fcb46b3d3c7652e543d5f334 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.12-h166bdaf_2.tar.bz2#8302381297332ea50532cf2c67961080 +https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.2-h846660c_100.tar.bz2#36a36fe04b932d4b327e7e81c5c43696 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e +https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1q-h166bdaf_0.tar.bz2#07acc367c7fc8b716770cd5b36d31717 +https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_4.tar.bz2#dd3e1941dd06f64cb88647d2f7ff8aaa +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_7.tar.bz2#37a460703214d0d1b421e2a47eb5e6d0 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_7.tar.bz2#785a9296ea478eb78c47593c4da6550f +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.64-ha37c62d_0.tar.bz2#5896fbd58d0376df8556a4aba1ce4f71 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 +https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-he0ac6c6_0.tar.bz2#f5759f0c80708fbf9c4836c0cb46d0fe +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.47.0-hdcd2b5c_1.tar.bz2#6fe9e31c2b8d0b022626ccac13e6ca3c +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h753d276_4.tar.bz2#6b611734b73d639c084ac4be2fcd996a +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.39.2-h753d276_1.tar.bz2#90136dc0a305db4e1df24945d431457b +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-haa6b8db_3.tar.bz2#89acee135f0809a18a1f4537390aa2dd +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.14-h22db469_4.tar.bz2#aced7c1f4b4dbfea08e033c6ae97c53e +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc869a4a_1.tar.bz2#7a268cf1386d271e576e35ae82149ef2 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.30-haf5c9bc_0.tar.bz2#9d3e24b1157af09abe5a2589119c7b1d +https://conda.anaconda.org/conda-forge/linux-64/portaudio-19.6.0-h57a0ea0_5.tar.bz2#5469312a373f481c05c380897fd7c923 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.12-h166bdaf_2.tar.bz2#4533821485cde83ab12ff3d8bda83768 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74 
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_7.tar.bz2#1699c1211d56a23c66047524cd76796e +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_0.tar.bz2#4e54cbfc47b8c74c2ecc1e7730d8edce +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.3-h3790be6_0.tar.bz2#7d862b05445123144bec92cb1acc8ef8 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-14.0.6-default_h3a83d3e_0.tar.bz2#cdbd49e0ab5c5a6c522acb8271977d4c +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.3.4-h27087fc_0.tar.bz2#620e52e160fd09eb8772dedd46bb19ef +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.72.1-h2d90d5f_0.tar.bz2#ebeadbb5fbc44052eeb6f96a2136e3c2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.4.0-h0e0dad5_3.tar.bz2#5627d42c13a9b117ae1701c6e195624f +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.30-h28c427c_0.tar.bz2#77f98ec0b224fd5ca8e7043e167efb83 +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.6-h582c2e5_0_cpython.tar.bz2#6f009f92084e84884d1dff862b85eb00 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.39.2-h4ff8645_1.tar.bz2#2676ec698ce91567fca50654ac1b18ba +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 +https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2#6d3ccbc56256204925bfa8378722792f +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_7.tar.bz2#3889dec08a472eb0f423e5609c76bde1 +https://conda.anaconda.org/conda-forge/noarch/certifi-2022.6.15-pyhd8ed1ab_1.tar.bz2#97349c8d67627cbf8f48d7e7e1773ea5 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.1.0-pyhd8ed1ab_0.tar.bz2#f7551a8a008dfad2b7ac9662dd124614 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2#c267da48ce208905d7d976d49dfd9433 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.5-pyhd8ed1ab_0.tar.bz2#f15c3912378a07726093cc94d1e13251 
+https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.8.0-pyhd8ed1ab_0.tar.bz2#10f0218dbd493ab2e5dc6759ddea4526 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.0-h8e229c2_0.tar.bz2#f314f79031fec74adc9bff50fbaffd89 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.7.1-pyhd8ed1ab_0.tar.bz2#984db277dfb9ea04a584aea39c6a34e4 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.8-hff1cb4f_0.tar.bz2#908fc30f89e27817d835b45f865536d7 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.72.1-h6239696_0.tar.bz2#a3a99cc33279091262bbc4f5ee7c4571 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f +https://conda.anaconda.org/conda-forge/linux-64/libclang-14.0.6-default_h2e3cab8_0.tar.bz2#eb70548da697e50cefa7ba939d57d001 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h3e49a29_2.tar.bz2#3b88f1d0fe2580594d58d7e44d664617 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.83.1-h7bff187_0.tar.bz2#d0c278476dba3b29ee13203784672ab1 +https://conda.anaconda.org/conda-forge/linux-64/libpq-14.5-hd77ab85_0.tar.bz2#d3126b425a04ed2360da1e651cef1b2d +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.0.31-h9c3ff4c_1.tar.bz2#fc4b6d93da04731db7601f2a1b1dc96a +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h522a892_0.tar.bz2#802e43f480122a85ae6a34c1909f8f98 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.78-h2350873_0.tar.bz2#ab3df39f96742e6f1a9878b09274c1dc +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h7d73246_1.tar.bz2#a11b4df9271a8d7917686725aa04c8f2 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.5.2-pyhd8ed1ab_1.tar.bz2#2fb3f88922e7aec26ba652fcdfe13950 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-2_cp310.tar.bz2#9e7160cd0d865e98f6803f1fe15c8b61 +https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2#974bca71d00364630f63f31fa7e059cb 
+https://conda.anaconda.org/conda-forge/noarch/setuptools-65.3.0-pyhd8ed1ab_1.tar.bz2#a64c8af7be7a6348c1d9e530f88fa4da +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2#a9d85960bc62d53cc4ea0d1d27f73c98 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.1-pyhd8ed1ab_0.tar.bz2#a3508a0c850745b875de88aea4c40cc5 +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 +https://conda.anaconda.org/conda-forge/noarch/babel-2.10.3-pyhd8ed1ab_0.tar.bz2#72f1c6d03109d7a70087bc1d029a8eda +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1012.tar.bz2#9604a7c93dd37bcb6d6cc8d6b64223a4 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_0.tar.bz2#3e4b55b02998782f8ca9ceaaa4f5ada9 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.83.1-h7bff187_0.tar.bz2#ba33b9995f5e691e4f439422d6efafc7 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py310hff52083_2.tar.bz2#1cdb74e021e4e0b703a8c2f7cc57d798 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.72.1-h6239696_0.tar.bz2#1698b7684d3c6a4d1de2ab946f5b0fb5 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h08b82f9_0.tar.bz2#de601caacbaa828d845f758e07e3b85e +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.11.4-py310hff52083_0.tar.bz2#8ea386e64531f1ecf4a5765181579e7e 
+https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.18-h8c3723f_1002.tar.bz2#7b3f287fcb7683f67b3d953b79f412ea +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_0.tar.bz2#8dc3e2dce8fa122f8df4f3739d1f771b +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h18fbbfe_3.tar.bz2#ea9758cf553476ddf75c789fdd239dc5 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py310h5764c6d_1.tar.bz2#ec5a727504409ad1380fc2a84f83d002 +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py310h37cc914_2.tar.bz2#0211369f253eedce9e570b4f0e5a981a +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.2-py310h53a5b5f_0.tar.bz2#8b3cfad14508018915e88612f5a963cd +https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 +https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.2.0-py310hbd86126_2.tar.bz2#443272de4234f6df4a78f50105edc741 +https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2#0b43abe4d3ee93e82742d37def53a836 +https://conda.anaconda.org/conda-forge/linux-64/pluggy-1.0.0-py310hff52083_3.tar.bz2#97f9a22577338f91a94dfac5c1a65a50 +https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.0.1-h93bde94_1.tar.bz2#8259528ea471b0963a91ce174f002e55 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.1-py310h5764c6d_0.tar.bz2#eb3be71bc11a51ff49b6a0af9968f0ed +https://conda.anaconda.org/conda-forge/noarch/pygments-2.13.0-pyhd8ed1ab_0.tar.bz2#9f478e8eedd301008b5f395bad0caaed +https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py310hff52083_5.tar.bz2#378f2260e871f3ea46c6fa58d9f05277 +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.0.0-py310h5764c6d_1.tar.bz2#b6f54b7c4177a745d5e6e4319282253a +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_4.tar.bz2#505dcf6be997e732d7a33831950dc3cf +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py310h5764c6d_0.tar.bz2#c42dcb37acd84b3ca197f03f57ef927d +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.3.0-hd8ed1ab_0.tar.bz2#f3e98e944832fb271a0dbda7b7771dc6 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py310h5764c6d_1.tar.bz2#791689ce9e578e2e83b635974af61743 +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.16.3-py310hff52083_1.tar.bz2#a91c9f0499e0f0f5912098c3462014b9 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py310h5764c6d_1004.tar.bz2#6499bb11b7feffb63b26847fc9181319 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.1-py310hde88566_0.tar.bz2#1f84cf065287d73aa0233d432d3a1ba9 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-37.0.4-py310h597c629_0.tar.bz2#f285746449d16d92884f4ce0cfe26679 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.8.1-pyhd8ed1ab_0.tar.bz2#df5026dbf551bb992cdf247b08e11078 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.37.1-py310h5764c6d_0.tar.bz2#3dda361cb1fa5da73a75c4089d2ed338 
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.20.3-hd4edc92_0.tar.bz2#94cb81ffdce328f80c87ac9b01244632 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-5.1.0-hf9f4e7c_0.tar.bz2#7c1f73a8f7864a202b126d82e88ddffc +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h06c54e2_4.tar.bz2#491803a7356c6a668a84d71f491c4014 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1007.tar.bz2#c2ec7c118184ddfd855fc3698d1c8e63 +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.3-py310h769672d_0.tar.bz2#e48c810453df0f03bb8fcdff5e1d9e9d +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-14.0-h7f54b18_8.tar.bz2#f9dbcfbb942ec9a3c0249cb71da5c7d1 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.1-py310hf94497c_1.tar.bz2#aaa559c22c09139a504796bd453fd535 +https://conda.anaconda.org/conda-forge/linux-64/pytest-7.1.2-py310hff52083_0.tar.bz2#5d44c6ab93d445b6c433914753390e86 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py310hde88566_2.tar.bz2#a282f30e2e1efa1f210817597e144762 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py310hde88566_1.tar.bz2#cbfce984f85c64401e3d4fedf4bc4247 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.9.0-py310hdfbd76f_0.tar.bz2#e5d21b0cb4161a40221786f2f05b3903 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.0.5-pyhd8ed1ab_0.tar.bz2#743074b7a216807886f7e8f6d497cceb +https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.4-py310h5e49deb_0.tar.bz2#2f2c225d04e99ff99d6d3a86692ce968 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.6.2-py310hd8f1fbe_0.tar.bz2#3d311837eadeb8137fca02bdb5a9751f +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_2.tar.bz2#46784478afa27e33b9d5f017c4deb49d +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py310hde88566_0.tar.bz2#49790458218da5f86068f32e3938d334 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.20.3-hf6a322e_0.tar.bz2#6ea2ce6265c3207876ef2369b7479f08 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.3-pyhd8ed1ab_0.tar.bz2#682f05a8e4b047ce4bdcec9d69c12551 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.3-py310h8d5ebf3_2.tar.bz2#760bc53cc184c9d6eeca9a38099e5fa8 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_0.tar.bz2#247c70ce54beeb3e60def44061576821 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py310h9fd08d4_101.tar.bz2#0c7d82a8e4a32c1231036eb8530f31b2 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.9-hc4f8a73_0.tar.bz2#b8e090dce29a036357552a009c770187 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_0.tar.bz2#1d7e241dfaf5475e893d4b824bb71b44 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310hd8f1fbe_0.tar.bz2#9e3db99607d6f9285b7348c2af28a095 +https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.4.0-pyhd8ed1ab_0.tar.bz2#95286e05a617de9ebfe3246cecbfb72f +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.3-py310he7eef42_2.tar.bz2#9212ffec588998a9b3ac573bba2e597e 
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h5a1934d_102.tar.bz2#bb8bdfa5e3e9e3f6ec861f05cd2ad441 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.20.0-py310hff52083_0.tar.bz2#5af49a9342d50006017b897698921f43 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.4-ha5833f6_2.tar.bz2#dd3aa6715b9e9efaf842febf18ce4261 +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2#0738978569b10669bdef41c671252dd1 +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py310hd9c82d4_101.tar.bz2#0333d51ee594be40f50b157ac6f27b5a +https://conda.anaconda.org/conda-forge/linux-64/graphviz-5.0.1-h5abf519_0.tar.bz2#03f22ca50fcff4bbee39da0943ab8475 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310h29803b5_0.tar.bz2#b5fb5328cae86d0b1591fc4894e68238 +https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_0.tar.bz2#70d6e72856de9551f83ae0f2de689a7a +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.3-py310hff52083_2.tar.bz2#46fb1538bf92de6d807feb81b462aa0f +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.8.1-pyhd8ed1ab_0.tar.bz2#7d8390ec71225ea9841b276552fdffba +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 +https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 368554bb25..af62d7e5b1 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -1,56 +1,57 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 0b8e98b045b5545a96321ab961f5e97fe2da8aa929328cc8df2d4d5f33ed8159 +# input_hash: 40cbe959a02aa488bdf70e6b6968135b05b560f9b9ed8768ccf1780314c0e219 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.10.8-ha878542_0.tar.bz2#575611b8a84f45960e87722eeb51fa26 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.6.15-ha878542_0.tar.bz2#c320890f77fd1d617fa876e0982002c2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_12.tar.bz2#f547bf125ab234cec9c89491b262fc2f -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_12.tar.bz2#7ff3b832ba5e6918c0d026976359d065 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.1.0-hdcd56e2_16.tar.bz2#b02605b875559ff99f04351fd5040760 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2#6f5ba041a41eb102a1027d9e68731be7 https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.28-ha770c72_0.tar.bz2#56594fdd5a80774a80d546fbbccf2c03 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_12.tar.bz2#33c165be455015cc74e8d857182f3f58 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_12.tar.bz2#763c5ec8116d984b4a33342236d7da36 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.1.0-h69a702a_16.tar.bz2#6bf15e29a20f614b18ae89368260d0a2 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.1.0-h8d9b700_16.tar.bz2#f013cf7749536ce43d82afbffdf499ab +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_12.tar.bz2#d34efbb8d7d6312c816b4bb647b818b1 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.1.0-h8d9b700_16.tar.bz2#4f05bc9844f7c101e6e147dab3c88d5c +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.6.1-h7f98852_0.tar.bz2#0347ce6a34f8b55b544b141432c6d4c7 +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 
https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.3-h9c3ff4c_0.tar.bz2#bd783d12b65023e333bb7016de41570b +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.8-h27087fc_0.tar.bz2#e1b07832504eeba765d648389cc387a9 +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_ha7695d1_103.tar.bz2#a56c5033619bdf56a22a1f0a0fd286aa https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.2-h9c3ff4c_0.tar.bz2#fe9a66a351bfa7a84c3108304c7bcba5 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.0-h27087fc_0.tar.bz2#a583d0bc9a85c48e8b07a588d1ac8a80 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-69.1-h9c3ff4c_0.tar.bz2#e0773c9556d588b062a4e1424a6a02fa -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h7f98852_0.tar.bz2#5c214edc675a7fb7cbb34b1d854e5141 -https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h7f98852_6.tar.bz2#b0f44f63f7d771d7670747a1dd5d5ac1 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz2#91d22aefa665265e8e31988b15145c8a +https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268 +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_7.tar.bz2#f82dc1c78bcf73583f2656433ce2933c +https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.13-h166bdaf_0.tar.bz2#4b5bee2e957570197327d0b20a718891 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 -https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.0-hf817b99_0.tar.bz2#b10bb2ebebfffa8800fa80ad3285719e https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.18-pthreads_h8fe5266_0.tar.bz2#41532e4448c0cce086d6570f95e4e12e 
+https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_2.tar.bz2#839776c4e967bc881c21da197127a3ae https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee +https://conda.anaconda.org/conda-forge/linux-64/libudev1-249-h166bdaf_4.tar.bz2#dc075ff6fcb46b3d3c7652e543d5f334 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.2-h7f98852_1.tar.bz2#46cf26ecc8775a0aab300ea1821aaa3c -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 -https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h9c3ff4c_0.tar.bz2#fb31bcb7af058244479ca635d20f0f4a +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.12-h166bdaf_2.tar.bz2#8302381297332ea50532cf2c67961080 +https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.2-h846660c_100.tar.bz2#36a36fe04b932d4b327e7e81c5c43696 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e -https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1l-h7f98852_0.tar.bz2#de7b38a1542dbe6f41653a8ae71adc53 +https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1q-h166bdaf_0.tar.bz2#07acc367c7fc8b716770cd5b36d31717 https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 @@ -62,163 +63,195 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-13_linux64_openblas.tar.bz2#8a4038563ed92dfa622bd72c0d8f31d3 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h7f98852_6.tar.bz2#28bfe0a70154e6881da7bae97517c948 -https://conda.anaconda.org/conda-forge/linux-64/libclang-13.0.0-default_hc23dcda_0.tar.bz2#7b140452b5bc91e46410b84807307249 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_4.tar.bz2#dd3e1941dd06f64cb88647d2f7ff8aaa +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_7.tar.bz2#37a460703214d0d1b421e2a47eb5e6d0 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_7.tar.bz2#785a9296ea478eb78c47593c4da6550f +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.64-ha37c62d_0.tar.bz2#5896fbd58d0376df8556a4aba1ce4f71 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 +https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-he0ac6c6_0.tar.bz2#f5759f0c80708fbf9c4836c0cb46d0fe +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.47.0-hdcd2b5c_1.tar.bz2#6fe9e31c2b8d0b022626ccac13e6ca3c +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h753d276_4.tar.bz2#6b611734b73d639c084ac4be2fcd996a +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.39.2-h753d276_1.tar.bz2#90136dc0a305db4e1df24945d431457b +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-haa6b8db_3.tar.bz2#89acee135f0809a18a1f4537390aa2dd https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.14-h22db469_4.tar.bz2#aced7c1f4b4dbfea08e033c6ae97c53e +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc869a4a_1.tar.bz2#7a268cf1386d271e576e35ae82149ef2 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.30-haf5c9bc_0.tar.bz2#9d3e24b1157af09abe5a2589119c7b1d +https://conda.anaconda.org/conda-forge/linux-64/portaudio-19.6.0-h57a0ea0_5.tar.bz2#5469312a373f481c05c380897fd7c923 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-ha95c52a_0.tar.bz2#5222b231b1ef49a7f60d40b363469b70 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h7f98852_6.tar.bz2#9e94bf16f14c78a36561d5019f490d22 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe 
-https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-13_linux64_openblas.tar.bz2#b17676dbd6688396c3a3076259fb7907 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-13_linux64_openblas.tar.bz2#018b80e8f21d8560ae4961567e3e00c9 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.46.0-h812cca2_0.tar.bz2#507fa47e9075f889af8e8b72925379be -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h885dcf4_1.tar.bz2#d1355eaa48f465782f228275a0a69771 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_1.tar.bz2#175a746a43d42c053b91aa765fbc197d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.28-hfa10184_0.tar.bz2#aac17542e50a474e2e632878dc696d50 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.37.0-h9cd32fc_0.tar.bz2#eb66fc098824d25518a79e83d12a81d6 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.11-h27826a3_1.tar.bz2#84e76fb280e735fec1efd2d21fd9cb27 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.12-h166bdaf_2.tar.bz2#4533821485cde83ab12ff3d8bda83768 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_7.tar.bz2#1699c1211d56a23c66047524cd76796e +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_0.tar.bz2#4e54cbfc47b8c74c2ecc1e7730d8edce +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.3-h3790be6_0.tar.bz2#7d862b05445123144bec92cb1acc8ef8 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-14.0.6-default_h3a83d3e_0.tar.bz2#cdbd49e0ab5c5a6c522acb8271977d4c +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.3.4-h27087fc_0.tar.bz2#620e52e160fd09eb8772dedd46bb19ef +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.72.1-h2d90d5f_0.tar.bz2#ebeadbb5fbc44052eeb6f96a2136e3c2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.4.0-h0e0dad5_3.tar.bz2#5627d42c13a9b117ae1701c6e195624f +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.30-h28c427c_0.tar.bz2#77f98ec0b224fd5ca8e7043e167efb83 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.39.2-h4ff8645_1.tar.bz2#2676ec698ce91567fca50654ac1b18ba +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h7f98852_6.tar.bz2#612385c4a83edb0619fe911d9da317f4 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_7.tar.bz2#3889dec08a472eb0f423e5609c76bde1 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_3.tar.bz2#511aa83cdfcc0132380db5daf2f15f27 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.0-h8e229c2_0.tar.bz2#f314f79031fec74adc9bff50fbaffd89 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.8-hff1cb4f_0.tar.bz2#908fc30f89e27817d835b45f865536d7 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.72.1-h6239696_0.tar.bz2#a3a99cc33279091262bbc4f5ee7c4571 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.2-h3452ae3_0.tar.bz2#c363665b4aabe56aae4f8981cff5b153 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/nss-3.74-hb5efdd6_0.tar.bz2#136876ca50177058594f6c2944e95c40 -https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-hb7a2778_2_cpython.tar.bz2#148ea076514259c7f562fbfba956a693 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f +https://conda.anaconda.org/conda-forge/linux-64/libclang-14.0.6-default_h2e3cab8_0.tar.bz2#eb70548da697e50cefa7ba939d57d001 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h3e49a29_2.tar.bz2#3b88f1d0fe2580594d58d7e44d664617 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.83.1-h7bff187_0.tar.bz2#d0c278476dba3b29ee13203784672ab1 +https://conda.anaconda.org/conda-forge/linux-64/libpq-14.5-hd77ab85_0.tar.bz2#d3126b425a04ed2360da1e651cef1b2d +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.0.31-h9c3ff4c_1.tar.bz2#fc4b6d93da04731db7601f2a1b1dc96a +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h522a892_0.tar.bz2#802e43f480122a85ae6a34c1909f8f98 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.78-h2350873_0.tar.bz2#ab3df39f96742e6f1a9878b09274c1dc +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h7d73246_1.tar.bz2#a11b4df9271a8d7917686725aa04c8f2 +https://conda.anaconda.org/conda-forge/linux-64/python-3.8.13-h582c2e5_0_cpython.tar.bz2#8ec74710472994e2411a8020fa8589ce +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb 
https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2#6d3ccbc56256204925bfa8378722792f +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1012.tar.bz2#9604a7c93dd37bcb6d6cc8d6b64223a4 +https://conda.anaconda.org/conda-forge/noarch/certifi-2022.6.15-pyhd8ed1ab_1.tar.bz2#97349c8d67627cbf8f48d7e7e1773ea5 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.10-pyhd8ed1ab_0.tar.bz2#ea77236c8031cfa821720b21b4cb0ceb -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.1.0-pyhd8ed1ab_0.tar.bz2#f7551a8a008dfad2b7ac9662dd124614 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2#c267da48ce208905d7d976d49dfd9433 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.83.1-h7bff187_0.tar.bz2#ba33b9995f5e691e4f439422d6efafc7 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_1.tar.bz2#d3f5797d3f9625c64860c93fc4359e64 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.94-ha180cfb_0.tar.bz2#c534c5248da4913002473919d76d0161 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.1.0-pyhd8ed1ab_0.tar.bz2#188e095f4dc38887bb48b065734b9e8d -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_3.tar.bz2#524a9f1718bac53a6cf4906bcc51d044 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.5-pyhd8ed1ab_0.tar.bz2#f15c3912378a07726093cc94d1e13251 +https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.8.0-pyhd8ed1ab_0.tar.bz2#10f0218dbd493ab2e5dc6759ddea4526 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.7.1-pyhd8ed1ab_0.tar.bz2#984db277dfb9ea04a584aea39c6a34e4 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.72.1-h6239696_0.tar.bz2#1698b7684d3c6a4d1de2ab946f5b0fb5 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h08b82f9_0.tar.bz2#de601caacbaa828d845f758e07e3b85e https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.81.0-h2574ce0_0.tar.bz2#1f8655741d0269ca6756f131522da1e8 
-https://conda.anaconda.org/conda-forge/linux-64/libpq-14.1-hd57d9b9_1.tar.bz2#a7024916bfdf33a014a0cc803580c9a1 -https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d +https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.18-h8c3723f_1002.tar.bz2#7b3f287fcb7683f67b3d953b79f412ea +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h18fbbfe_3.tar.bz2#ea9758cf553476ddf75c789fdd239dc5 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.3.0-pyhd8ed1ab_0.tar.bz2#7bc119135be2a43e1701432399d8c28a +https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.5.2-pyhd8ed1ab_1.tar.bz2#2fb3f88922e7aec26ba652fcdfe13950 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.0.1-h93bde94_1.tar.bz2#8259528ea471b0963a91ce174f002e55 +https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.7-pyhd8ed1ab_0.tar.bz2#727e2216d9c47455d8ddc060eb2caad9 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 -https://conda.anaconda.org/conda-forge/noarch/pytz-2021.3-pyhd8ed1ab_0.tar.bz2#7e4f811bff46a5a6a7e0094921389395 +https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2#974bca71d00364630f63f31fa7e059cb +https://conda.anaconda.org/conda-forge/noarch/setuptools-65.3.0-pyhd8ed1ab_1.tar.bz2#a64c8af7be7a6348c1d9e530f88fa4da https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a 
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2#a9d85960bc62d53cc4ea0d1d27f73c98 https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.7.0-pyhd8ed1ab_0.tar.bz2#947f7f41958eabc0f6e886557512bb76 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.1-pyhd8ed1ab_0.tar.bz2#a3508a0c850745b875de88aea4c40cc5 https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a -https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha00ac49_1009.tar.bz2#d1dff57b8731c245d3247b46d002e1c9 -https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py38h578d9bd_1.tar.bz2#52a6cee65a5d10ed1c3f0af24fb48dd3 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py38h3931269_0.tar.bz2#9c491a90ae11d08ca97326a0ed876f3a -https://conda.anaconda.org/conda-forge/linux-64/curl-7.81.0-h2574ce0_0.tar.bz2#3a95d393b490f82aa406f1892fad84d9 +https://conda.anaconda.org/conda-forge/noarch/babel-2.10.3-pyhd8ed1ab_0.tar.bz2#72f1c6d03109d7a70087bc1d029a8eda +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_0.tar.bz2#a970d201055ec06a75db83bf25447eb2 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py38h578d9bd_3.tar.bz2#a7866449fb9e5e4008a02df276549d34 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 -https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.10.1-py38h578d9bd_0.tar.bz2#26da12e39b1b93e82fb865e967d0cbe0 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1.tar.bz2#085365abfe53d5d13bb68b1dda0b439e -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h3cfcdeb_1.tar.bz2#37d7568c595f0cfcd0c493f5ca0344ab -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.1-py38h6ae9a64_0.tar.bz2#9ec24c7acb2252816f1f6b6687317432 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.20.3-hd4edc92_0.tar.bz2#94cb81ffdce328f80c87ac9b01244632 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-5.1.0-hf9f4e7c_0.tar.bz2#7c1f73a8f7864a202b126d82e88ddffc +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.11.4-py38h578d9bd_0.tar.bz2#037225c33a50e99c5d4f86fac90f6de8 
+https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py38h43d8883_0.tar.bz2#ae54c61918e1cbd280b8587ed6219258 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h06c54e2_4.tar.bz2#491803a7356c6a668a84d71f491c4014 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py38h0a891b7_1.tar.bz2#20d003ad5f584e212c299f64cac46c05 +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38h97ac3a3_2.tar.bz2#fccce86e5fc8183bf2658ac9bfc535b4 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.2-py38h3a7f9d9_0.tar.bz2#a7579626c41b3975da213c0b53aefa29 https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 -https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 -https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 +https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.2.0-py38ha3b2c9c_2.tar.bz2#a077cc2bb9d854074b1cf4607252da7a +https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2#0b43abe4d3ee93e82742d37def53a836 +https://conda.anaconda.org/conda-forge/linux-64/pluggy-1.0.0-py38h578d9bd_3.tar.bz2#6ce4ce3d4490a56eb33b52c179609193 https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.1-h277dcde_0.tar.bz2#f2ceb1be6565c35e2db0ac948754751d -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py38h709712a_8.tar.bz2#11b72f5b1cc15427c89232321172a0bc -https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_4.tar.bz2#9c4bbee6f682f2fc7d7803df3996e77e +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.1-py38h0a891b7_0.tar.bz2#e3908bd184030e7f4a3d837959ebf6d7 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-14.0-h7f54b18_8.tar.bz2#f9dbcfbb942ec9a3c0249cb71da5c7d1 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.13.0-pyhd8ed1ab_0.tar.bz2#9f478e8eedd301008b5f395bad0caaed +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.1-py38he1635e7_1.tar.bz2#3907607e23c3e18202960fc4217baa0a +https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_5.tar.bz2#11113c7e50bb81f30762fe8325f305e1 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-ha98a1a1_5.tar.bz2#9b27fa0b1044a2119fb1b290617fe06f -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.5.0-py38h578d9bd_0.tar.bz2#9807c89f3ce846015dbad3c1d04348a5 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.13.0-py38h578d9bd_0.tar.bz2#561081f4a30990533541979c9ee84732 
-https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1003.tar.bz2#9189b42c42b9c87b2b2068cbe31901a8 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.2-py38h6c62de6_0.tar.bz2#73892e60ccea826c7f7a2215e48d22cf -https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py38h3e25421_0.tar.bz2#acc14d0d71dbf74f6a15f2456951b6cf -https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.1-pyhd8ed1ab_0.tar.bz2#7968db84df10b74d9792d66d7da216df -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.29.0-py38h497a2fe_0.tar.bz2#3d96473ac57b7260a3fc3bdb13d2db79 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.2.0-hb4a5f5f_0.tar.bz2#d03d53e6bcb97e6a97a1659fb38aa76e -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006.tar.bz2#829b1209dfadd431a11048d6eeaf5bef -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.0-py38h43a58ef_0.tar.bz2#23427f52c81076594a95c006ebf7552e -https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38h5383654_1.tar.bz2#5b600e019fa7c33be73bdb626236936b -https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h6c62de6_1.tar.bz2#a350e3f4ca899e95122f66806e048858 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py38h6c62de6_1.tar.bz2#2953d3fc0113fc6ffb955a5b72811fb0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py38h56a6a73_0.tar.bz2#2d318049369bb52d2687b0ac2be82751 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h596eeab_5.tar.bz2#ec3b783081e14a9dc0eb5ce609649728 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.0.0-py38h0a891b7_1.tar.bz2#69fc64e4f4c13abe0b8df699ddaa1051 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_4.tar.bz2#ba24ff01bb38c5cd5be54b45ef685db3 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py38h0a891b7_0.tar.bz2#acd276486a0067bee3098590f0952a0f +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.3.0-hd8ed1ab_0.tar.bz2#f3e98e944832fb271a0dbda7b7771dc6 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h0a891b7_1.tar.bz2#83df0e9e3faffc295f12607438691465 +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.16.3-py38h578d9bd_1.tar.bz2#30765568a158c9457d577cc83f0e8307 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h0a891b7_1004.tar.bz2#9fcaaca218dcfeb8da806d4fd4824aa0 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.1-py38h71d37f0_0.tar.bz2#acf7ef1f057459e9e707142a4b92e481 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-37.0.4-py38h2b5fc30_0.tar.bz2#28e9acd6f13ed29f27d5550a1cf0554b +https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.8.1-pyhd8ed1ab_0.tar.bz2#df5026dbf551bb992cdf247b08e11078 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.37.1-py38h0a891b7_0.tar.bz2#369c805e42d0244be7c097b39c38ebb4 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.20.3-hf6a322e_0.tar.bz2#6ea2ce6265c3207876ef2369b7479f08 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h71d37f0_1007.tar.bz2#c8d3d8f137f8af7b1daca318131223b1 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_0.tar.bz2#247c70ce54beeb3e60def44061576821 +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.3-py38h47df419_0.tar.bz2#91c5ac3f8f0e55a946be7b9ce489abfe +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.9-hc4f8a73_0.tar.bz2#b8e090dce29a036357552a009c770187 +https://conda.anaconda.org/conda-forge/linux-64/pytest-7.1.2-py38h578d9bd_0.tar.bz2#626d2b8f96c8c3d20198e6bd84d1cfb7 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h71d37f0_2.tar.bz2#cdef2f7b0e263e338016da4b77ae4c0b +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py38h71d37f0_1.tar.bz2#704f1776af689de568514b0ff9dd0fbe +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.9.0-py38hea3f02b_0.tar.bz2#d19e23bb56b31d2504a0ff4d46b7aabc +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.0.5-pyhd8ed1ab_0.tar.bz2#743074b7a216807886f7e8f6d497cceb +https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.4-py38h3b45516_0.tar.bz2#d8621497bcc7b369ef9cce25d5a58aeb +https://conda.anaconda.org/conda-forge/linux-64/sip-6.6.2-py38hfa26641_0.tar.bz2#b869c6b54a02c92fac8b10c0d9b32e43 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h1fd1430_1.tar.bz2#c494f75082f9c052944fda1b22c83336 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py38h6c62de6_2.tar.bz2#350322b046c129e5802b79358a1343f7 -https://conda.anaconda.org/conda-forge/noarch/identify-2.4.6-pyhd8ed1ab_0.tar.bz2#d4030c75256440b8375b2f32c4ed35cd -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2#47cf0cab2ae368e1062e75cfbc4277af -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.4-mpi_mpich_h1364a43_0.tar.bz2#b6ba4f487ef9fd5d353ff277df06d133 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py38h2823cc8_101.tar.bz2#1dfe1cdee4532c72f893955259eb3de9 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.3-h9967ed3_0.tar.bz2#37f1c68380bc5dfe0f5bb2655e207a73 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce -https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579 -https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.2-py38ha217159_3.tar.bz2#d7461e191f7a0522e4709612786bdf4e -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h43d8883_2.tar.bz2#3f6ce81c7d28563fe2af763d9ff43e62 
+https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py38h71d37f0_0.tar.bz2#b9e7f6f7509496a4a62906d02dfe3128 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h5a1934d_102.tar.bz2#bb8bdfa5e3e9e3f6ec861f05cd2ad441 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-h0a9e6e8_2.tar.bz2#aa768fdaad03509a97df37f81163346b -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.17.0-py38h578d9bd_0.tar.bz2#839ac9dba9a6126c9532781a9ea4506b -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_8.tar.bz2#88368a5889f31dff922a2d57bbfc3f5b -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.8-pyhd8ed1ab_1.tar.bz2#53f1387c68c21cecb386e2cde51b3f7c +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.3-pyhd8ed1ab_0.tar.bz2#682f05a8e4b047ce4bdcec9d69c12551 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.3-py38h38b5ce0_2.tar.bz2#0db5b110946be87a04643c1ba95c6ef9 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py38h32db9c8_101.tar.bz2#d1451d40c8204594cdcf156363128000 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_0.tar.bz2#1d7e241dfaf5475e893d4b824bb71b44 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py38hfa26641_0.tar.bz2#6ddbd9abb62e70243702c006b81c63e4 +https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.4.0-pyhd8ed1ab_0.tar.bz2#95286e05a617de9ebfe3246cecbfb72f +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.4-ha5833f6_2.tar.bz2#dd3aa6715b9e9efaf842febf18ce4261 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.3-py38h1816dc1_2.tar.bz2#0beb44c3333518cdbb4ccbf7913ff38a https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py38h9147699_101.tar.bz2#5a9de1dec507b6614150a77d1aabf257 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h8e749b2_2.tar.bz2#8c20fd968c8b6af73444b1199d5fb0cb -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py38h578d9bd_0.tar.bz2#0d78be9cf1c400ba8e3077cf060492f1 -https://conda.anaconda.org/conda-forge/noarch/requests-2.27.1-pyhd8ed1ab_0.tar.bz2#7c1c427246b057b8fa97200ecdb2ed62 -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.4.0-pyh6c4a22f_1.tar.bz2#a9025d14c2a609e0d895ad3e75b5369c -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb +https://conda.anaconda.org/conda-forge/linux-64/graphviz-5.0.1-h5abf519_0.tar.bz2#03f22ca50fcff4bbee39da0943ab8475 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.20.0-py38h578d9bd_0.tar.bz2#ac8aa845f1177901eecf1518997ea0a1 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38h7492b6b_0.tar.bz2#59ece9f652baf50ee6b842db833896ae 
+https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2#0738978569b10669bdef41c671252dd1 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.3-py38h578d9bd_2.tar.bz2#3b6f187bade8a47d05c8a74c6385a900 +https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_0.tar.bz2#70d6e72856de9551f83ae0f2de689a7a +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.8.1-pyhd8ed1ab_0.tar.bz2#7d8390ec71225ea9841b276552fdffba +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-1.0.0-pyhd8ed1ab_0.tar.bz2#9f633f2f2869184e31acfeae95b24345 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_1.tar.bz2#63d2f874f990fdcab47c822b608d6ade diff --git a/requirements/ci/nox.lock/py39-linux-64.lock b/requirements/ci/nox.lock/py39-linux-64.lock new file mode 100644 index 0000000000..3cfe2b9a29 --- /dev/null +++ b/requirements/ci/nox.lock/py39-linux-64.lock @@ -0,0 +1,258 @@ +# Generated by conda-lock. +# platform: linux-64 +# input_hash: 87d5bb40e4218219f5c768306688f703396cce4593f26c2ff46d8adb198c9ae9 +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.6.15-ha878542_0.tar.bz2#c320890f77fd1d617fa876e0982002c2 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.1.0-hdcd56e2_16.tar.bz2#b02605b875559ff99f04351fd5040760 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2#6f5ba041a41eb102a1027d9e68731be7 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf +https://conda.anaconda.org/conda-forge/noarch/tzdata-2022c-h191b570_0.tar.bz2#a56386ad31a7322940dd7d03fb3a9979 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.1.0-h69a702a_16.tar.bz2#6bf15e29a20f614b18ae89368260d0a2 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.1.0-h8d9b700_16.tar.bz2#f013cf7749536ce43d82afbffdf499ab +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
+https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.1.0-h8d9b700_16.tar.bz2#4f05bc9844f7c101e6e147dab3c88d5c +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.6.1-h7f98852_0.tar.bz2#0347ce6a34f8b55b544b141432c6d4c7 +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.8-h27087fc_0.tar.bz2#e1b07832504eeba765d648389cc387a9 +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_ha7695d1_103.tar.bz2#a56c5033619bdf56a22a1f0a0fd286aa +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.0-h27087fc_0.tar.bz2#a583d0bc9a85c48e8b07a588d1ac8a80 +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268 +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_7.tar.bz2#f82dc1c78bcf73583f2656433ce2933c +https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.13-h166bdaf_0.tar.bz2#4b5bee2e957570197327d0b20a718891 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_2.tar.bz2#839776c4e967bc881c21da197127a3ae +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee +https://conda.anaconda.org/conda-forge/linux-64/libudev1-249-h166bdaf_4.tar.bz2#dc075ff6fcb46b3d3c7652e543d5f334 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d 
+https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.12-h166bdaf_2.tar.bz2#8302381297332ea50532cf2c67961080 +https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.2-h846660c_100.tar.bz2#36a36fe04b932d4b327e7e81c5c43696 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e +https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1q-h166bdaf_0.tar.bz2#07acc367c7fc8b716770cd5b36d31717 +https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_4.tar.bz2#dd3e1941dd06f64cb88647d2f7ff8aaa +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_7.tar.bz2#37a460703214d0d1b421e2a47eb5e6d0 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_7.tar.bz2#785a9296ea478eb78c47593c4da6550f +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.64-ha37c62d_0.tar.bz2#5896fbd58d0376df8556a4aba1ce4f71 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 +https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-he0ac6c6_0.tar.bz2#f5759f0c80708fbf9c4836c0cb46d0fe +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.47.0-hdcd2b5c_1.tar.bz2#6fe9e31c2b8d0b022626ccac13e6ca3c +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h753d276_4.tar.bz2#6b611734b73d639c084ac4be2fcd996a 
+https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.39.2-h753d276_1.tar.bz2#90136dc0a305db4e1df24945d431457b +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-haa6b8db_3.tar.bz2#89acee135f0809a18a1f4537390aa2dd +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.14-h22db469_4.tar.bz2#aced7c1f4b4dbfea08e033c6ae97c53e +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc869a4a_1.tar.bz2#7a268cf1386d271e576e35ae82149ef2 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.30-haf5c9bc_0.tar.bz2#9d3e24b1157af09abe5a2589119c7b1d +https://conda.anaconda.org/conda-forge/linux-64/portaudio-19.6.0-h57a0ea0_5.tar.bz2#5469312a373f481c05c380897fd7c923 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.12-h166bdaf_2.tar.bz2#4533821485cde83ab12ff3d8bda83768 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_7.tar.bz2#1699c1211d56a23c66047524cd76796e +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_0.tar.bz2#4e54cbfc47b8c74c2ecc1e7730d8edce +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.3-h3790be6_0.tar.bz2#7d862b05445123144bec92cb1acc8ef8 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-14.0.6-default_h3a83d3e_0.tar.bz2#cdbd49e0ab5c5a6c522acb8271977d4c +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.3.4-h27087fc_0.tar.bz2#620e52e160fd09eb8772dedd46bb19ef +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.72.1-h2d90d5f_0.tar.bz2#ebeadbb5fbc44052eeb6f96a2136e3c2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.4.0-h0e0dad5_3.tar.bz2#5627d42c13a9b117ae1701c6e195624f +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.30-h28c427c_0.tar.bz2#77f98ec0b224fd5ca8e7043e167efb83 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.39.2-h4ff8645_1.tar.bz2#2676ec698ce91567fca50654ac1b18ba +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_7.tar.bz2#3889dec08a472eb0f423e5609c76bde1 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.0-h8e229c2_0.tar.bz2#f314f79031fec74adc9bff50fbaffd89 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.8-hff1cb4f_0.tar.bz2#908fc30f89e27817d835b45f865536d7 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.72.1-h6239696_0.tar.bz2#a3a99cc33279091262bbc4f5ee7c4571 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f +https://conda.anaconda.org/conda-forge/linux-64/libclang-14.0.6-default_h2e3cab8_0.tar.bz2#eb70548da697e50cefa7ba939d57d001 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h3e49a29_2.tar.bz2#3b88f1d0fe2580594d58d7e44d664617 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.83.1-h7bff187_0.tar.bz2#d0c278476dba3b29ee13203784672ab1 +https://conda.anaconda.org/conda-forge/linux-64/libpq-14.5-hd77ab85_0.tar.bz2#d3126b425a04ed2360da1e651cef1b2d +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.0.31-h9c3ff4c_1.tar.bz2#fc4b6d93da04731db7601f2a1b1dc96a +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h522a892_0.tar.bz2#802e43f480122a85ae6a34c1909f8f98 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.78-h2350873_0.tar.bz2#ab3df39f96742e6f1a9878b09274c1dc +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h7d73246_1.tar.bz2#a11b4df9271a8d7917686725aa04c8f2 +https://conda.anaconda.org/conda-forge/linux-64/python-3.9.13-h9a8a25e_0_cpython.tar.bz2#69bc307cc4d7396c5fccb26bbcc9c379 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2#6d3ccbc56256204925bfa8378722792f +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1012.tar.bz2#9604a7c93dd37bcb6d6cc8d6b64223a4 +https://conda.anaconda.org/conda-forge/noarch/certifi-2022.6.15-pyhd8ed1ab_1.tar.bz2#97349c8d67627cbf8f48d7e7e1773ea5 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.1.0-pyhd8ed1ab_0.tar.bz2#f7551a8a008dfad2b7ac9662dd124614 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2#c267da48ce208905d7d976d49dfd9433 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.83.1-h7bff187_0.tar.bz2#ba33b9995f5e691e4f439422d6efafc7 
+https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.5-pyhd8ed1ab_0.tar.bz2#f15c3912378a07726093cc94d1e13251 +https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.8.0-pyhd8ed1ab_0.tar.bz2#10f0218dbd493ab2e5dc6759ddea4526 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.7.1-pyhd8ed1ab_0.tar.bz2#984db277dfb9ea04a584aea39c6a34e4 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.72.1-h6239696_0.tar.bz2#1698b7684d3c6a4d1de2ab946f5b0fb5 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h08b82f9_0.tar.bz2#de601caacbaa828d845f758e07e3b85e +https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 +https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.18-h8c3723f_1002.tar.bz2#7b3f287fcb7683f67b3d953b79f412ea +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h18fbbfe_3.tar.bz2#ea9758cf553476ddf75c789fdd239dc5 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.5.2-pyhd8ed1ab_1.tar.bz2#2fb3f88922e7aec26ba652fcdfe13950 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.0.1-h93bde94_1.tar.bz2#8259528ea471b0963a91ce174f002e55 +https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-2_cp39.tar.bz2#39adde4247484de2bb4000122fdcf665 +https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2#974bca71d00364630f63f31fa7e059cb +https://conda.anaconda.org/conda-forge/noarch/setuptools-65.3.0-pyhd8ed1ab_1.tar.bz2#a64c8af7be7a6348c1d9e530f88fa4da +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2#a9d85960bc62d53cc4ea0d1d27f73c98 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.1-pyhd8ed1ab_0.tar.bz2#a3508a0c850745b875de88aea4c40cc5 +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb +https://conda.anaconda.org/conda-forge/noarch/babel-2.10.3-pyhd8ed1ab_0.tar.bz2#72f1c6d03109d7a70087bc1d029a8eda +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_0.tar.bz2#61e961a94c8fd535e4496b17e7452dfe +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py39hf3d152e_3.tar.bz2#4f0fa7459a1f40a969aaad418b1c428c +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.20.3-hd4edc92_0.tar.bz2#94cb81ffdce328f80c87ac9b01244632 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-5.1.0-hf9f4e7c_0.tar.bz2#7c1f73a8f7864a202b126d82e88ddffc +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.11.4-py39hf3d152e_0.tar.bz2#4c2a0eabf0b8980b2c755646a6f750eb +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_0.tar.bz2#e8d1310648c189d6d11a2e13f73da1fe +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h06c54e2_4.tar.bz2#491803a7356c6a668a84d71f491c4014 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py39hb9d737c_1.tar.bz2#7cda413e43b252044a270c2477031c5c +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py39h32b9844_2.tar.bz2#b809706525f081610469169b671b2600 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.2-py39hba7629e_0.tar.bz2#25285f960f9c7f4e8ef56171af5e2a22 +https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 +https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.2.0-py39hd5dbb17_2.tar.bz2#3b74a959f6a8008f5901de60b3572c09 +https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2#0b43abe4d3ee93e82742d37def53a836 +https://conda.anaconda.org/conda-forge/linux-64/pluggy-1.0.0-py39hf3d152e_3.tar.bz2#c375c89340e563053f3656c7f134d265 
+https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.1-py39hb9d737c_0.tar.bz2#5852c69cad74811dc3c95f9ab6a184ef +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-14.0-h7f54b18_8.tar.bz2#f9dbcfbb942ec9a3c0249cb71da5c7d1 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.13.0-pyhd8ed1ab_0.tar.bz2#9f478e8eedd301008b5f395bad0caaed +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.1-py39hdcf6798_1.tar.bz2#4edc329e5d60c4a1c1299cea60608d00 +https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py39hf3d152e_5.tar.bz2#d34b97a2386932b97c7cb80916a673e7 +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.0.0-py39hb9d737c_1.tar.bz2#9f71f72dad4fd7b9da7bcc2ba64505bc +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_4.tar.bz2#dcc47a3b751508507183d17e569805e5 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py39hb9d737c_0.tar.bz2#a3c57360af28c0d9956622af99a521cd +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.3.0-hd8ed1ab_0.tar.bz2#f3e98e944832fb271a0dbda7b7771dc6 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py39hb9d737c_1.tar.bz2#ef84376736d1e8a814ccb06d1d814e6f +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.16.3-py39hf3d152e_1.tar.bz2#baa79a28aa08de404d9deae634b91e03 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py39hb9d737c_1004.tar.bz2#05a99367d885ec9990f25e74128a8a08 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.1-py39hd257fcd_0.tar.bz2#0911339f31c5fa644c312e4b3af95ea5 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-37.0.4-py39hd97740a_0.tar.bz2#edc3668e7b71657237f94cf25e286478 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.8.1-pyhd8ed1ab_0.tar.bz2#df5026dbf551bb992cdf247b08e11078 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.37.1-py39hb9d737c_0.tar.bz2#b006086e249cf6d88758bff9b462f971 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.20.3-hf6a322e_0.tar.bz2#6ea2ce6265c3207876ef2369b7479f08 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39hd257fcd_1007.tar.bz2#e7527bcf8da0dad996aaefd046c17480 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_0.tar.bz2#247c70ce54beeb3e60def44061576821 +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.3-py39h1832856_0.tar.bz2#74e00961703972cf33b44a6fca7c3d51 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.9-hc4f8a73_0.tar.bz2#b8e090dce29a036357552a009c770187 +https://conda.anaconda.org/conda-forge/linux-64/pytest-7.1.2-py39hf3d152e_0.tar.bz2#a6bcf633d12aabdfc4cb32a09ebc0f31 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py39hd257fcd_2.tar.bz2#644be766007a1dc7590c3277647f81a1 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py39hd257fcd_1.tar.bz2#c4b698994b2d8d2e659ae02202e6abe4 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.9.0-py39h8ba3f38_0.tar.bz2#b098a256777cb9e2605451f183c78768 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.0.5-pyhd8ed1ab_0.tar.bz2#743074b7a216807886f7e8f6d497cceb 
+https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.4-py39h68ae834_0.tar.bz2#e871ee7de5bfa95095256e95e30be2a6 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.6.2-py39h5a03fae_0.tar.bz2#e37704c6be07b8b14ffc1ce912802ce0 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_2.tar.bz2#5a3bb9dc2fe08a4a6f2b61548a1431d6 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py39hd257fcd_0.tar.bz2#e0f1f1d3013be31359d3ac635b288469 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h5a1934d_102.tar.bz2#bb8bdfa5e3e9e3f6ec861f05cd2ad441 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.3-pyhd8ed1ab_0.tar.bz2#682f05a8e4b047ce4bdcec9d69c12551 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.3-py39h19d6b11_2.tar.bz2#dc400bb297d8425b8b05367a21854b0b +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py39h71b8e10_101.tar.bz2#91e01aa93a2bcca96c9d64d2ce4f65f0 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_0.tar.bz2#1d7e241dfaf5475e893d4b824bb71b44 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h5a03fae_0.tar.bz2#1fd9112714d50ee5be3dbf4fd23964dc +https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.4.0-pyhd8ed1ab_0.tar.bz2#95286e05a617de9ebfe3246cecbfb72f +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.4-ha5833f6_2.tar.bz2#dd3aa6715b9e9efaf842febf18ce4261 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.3-py39hed214b2_2.tar.bz2#12964abb0bdcb4abb3c680b359560c1b +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py39h8bb458d_101.tar.bz2#347f324dd99dfb0b1479a466213b55bf +https://conda.anaconda.org/conda-forge/linux-64/graphviz-5.0.1-h5abf519_0.tar.bz2#03f22ca50fcff4bbee39da0943ab8475 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.20.0-py39hf3d152e_0.tar.bz2#314c8cb1538706f62ec36cf64370f2b2 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h18e9c17_0.tar.bz2#5ed8f83afff3b64fa91f7a6af8d7ff04 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2#0738978569b10669bdef41c671252dd1 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.3-py39hf3d152e_2.tar.bz2#98bf9bdfbac2ac73bbd1dc12a61519eb +https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_0.tar.bz2#70d6e72856de9551f83ae0f2de689a7a +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.8.1-pyhd8ed1ab_0.tar.bz2#7d8390ec71225ea9841b276552fdffba +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 +https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a diff --git a/requirements/ci/py310.yml b/requirements/ci/py310.yml new file mode 100644 index 0000000000..8f730729b7 --- /dev/null +++ b/requirements/ci/py310.yml @@ -0,0 +1,51 @@ +name: iris-dev + +channels: + - conda-forge + +dependencies: + - python =3.10 + +# Setup dependencies. + - setuptools >=64 + - setuptools-scm >=7 + +# Core dependencies. + - cartopy >=0.20 + - cf-units >=3.1 + - cftime >=1.5 + - dask-core >=2.26 + - matplotlib + - netcdf4 + - numpy >=1.19 + - python-xxhash + - pyproj + - scipy + - shapely !=1.8.3 + +# Optional dependencies. + - esmpy >=7.0 + - graphviz + - iris-sample-data >=2.4.0 + - mo_pack + - nc-time-axis >=1.4 + - pandas + - pip + - python-stratify + +# Test dependencies. + - filelock + - imagehash >=4.0 + - pre-commit + - psutil + - pytest + - pytest-xdist + - requests + +# Documentation dependencies. + - sphinx + - sphinxcontrib-napoleon + - sphinx-copybutton + - sphinx-gallery >=0.11.0 + - sphinx-panels + - pydata-sphinx-theme = 0.8.1 diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index d3d7f9d0c2..d92a68076c 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -7,25 +7,28 @@ dependencies: - python =3.8 # Setup dependencies. - - setuptools >=40.8.0 + - setuptools >=64 + - setuptools-scm >=7 # Core dependencies. - cartopy >=0.20 - - cf-units >=3 + - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2 + - dask-core >=2.26 - matplotlib - netcdf4 - numpy >=1.19 - python-xxhash + - pyproj - scipy + - shapely !=1.8.3 # Optional dependencies. - esmpy >=7.0 - graphviz - iris-sample-data >=2.4.0 - mo_pack - - nc-time-axis >=1.3 + - nc-time-axis >=1.4 - pandas - pip - python-stratify @@ -33,15 +36,16 @@ dependencies: # Test dependencies. - filelock - imagehash >=4.0 - - nose - - pillow <7 - pre-commit + - psutil + - pytest + - pytest-xdist - requests # Documentation dependencies. - sphinx - sphinxcontrib-napoleon - sphinx-copybutton - - sphinx-gallery + - sphinx-gallery >=0.11.0 - sphinx-panels - - sphinx_rtd_theme + - pydata-sphinx-theme = 0.8.1 diff --git a/requirements/ci/py39.yml b/requirements/ci/py39.yml new file mode 100644 index 0000000000..001d3565d5 --- /dev/null +++ b/requirements/ci/py39.yml @@ -0,0 +1,51 @@ +name: iris-dev + +channels: + - conda-forge + +dependencies: + - python =3.9 + +# Setup dependencies. + - setuptools >=64 + - setuptools-scm >=7 + +# Core dependencies. + - cartopy >=0.20 + - cf-units >=3.1 + - cftime >=1.5 + - dask-core >=2.26 + - matplotlib + - netcdf4 + - numpy >=1.19 + - python-xxhash + - pyproj + - scipy + - shapely !=1.8.3 + +# Optional dependencies. + - esmpy >=7.0 + - graphviz + - iris-sample-data >=2.4.0 + - mo_pack + - nc-time-axis >=1.4 + - pandas + - pip + - python-stratify + +# Test dependencies. + - filelock + - imagehash >=4.0 + - pre-commit + - psutil + - pytest + - pytest-xdist + - requests + +# Documentation dependencies. 
+ - sphinx + - sphinxcontrib-napoleon + - sphinx-copybutton + - sphinx-gallery >=0.11.0 + - sphinx-panels + - pydata-sphinx-theme = 0.8.1 diff --git a/setup.cfg b/setup.cfg index 1d3fb8b7c9..e5f0bc5b46 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,8 @@ [metadata] author = SciTools Developers -author_email = scitools-iris-dev@googlegroups.com +author_email = scitools.pub@gmail.com classifiers = - Development Status :: 5 Production/Stable + Development Status :: 5 - Production/Stable Intended Audience :: Science/Research License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+) Operating System :: MacOS @@ -11,8 +11,9 @@ classifiers = Operating System :: Unix Programming Language :: Python Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 Programming Language :: Python :: Implementation :: CPython Topic :: Scientific/Engineering Topic :: Scientific/Engineering :: Atmospheric Science @@ -31,7 +32,7 @@ keywords = ugrid visualisation license = LGPL-3.0-or-later -license_file = COPYING.LESSER +license_files = COPYING.LESSER long_description = file: README.md long_description_content_type = text/markdown name = scitools-iris @@ -47,15 +48,16 @@ version = attr: iris.__version__ include_package_data = True install_requires = cartopy>=0.20 - cf-units>=3 + cf-units>=3.1 cftime>=1.5.0 - dask[array]>=2 + dask[array]>=2.26 matplotlib netcdf4 numpy>=1.19 scipy + shapely!=1.8.3 xxhash -packages = find: +packages = find_namespace: package_dir = =lib python_requires = @@ -69,20 +71,20 @@ where = lib docs = sphinx sphinx-copybutton - sphinx-gallery + sphinx-gallery>=0.11.0 sphinx_rtd_theme sphinxcontrib-napoleon sphinx-panels test = filelock imagehash>=4.0 - nose - pillow<7 pre-commit requests + pytest + pytest-xdist all = mo_pack - nc-time-axis>=1.3 + nc-time-axis>=1.4 pandas stratify %(docs)s diff --git a/setup.py b/setup.py index f48f3fe25a..061b35c262 100644 --- a/setup.py +++ b/setup.py @@ -1,42 +1,16 @@ -from contextlib import contextmanager import os -from shutil import copyfile import sys from setuptools import Command, setup from setuptools.command.build_py import build_py -from setuptools.command.develop import develop as develop_cmd - - -@contextmanager -def temporary_path(directory): - """ - Context manager that adds and subsequently removes the given directory - to sys.path - - """ - sys.path.insert(0, directory) - try: - yield - finally: - del sys.path[0] - - -# Add full path so Python doesn't load any __init__.py in the intervening -# directories, thereby saving setup.py from additional dependencies. -with temporary_path("lib/iris/tests/runner"): - from _runner import TestRunner # noqa: - - -class SetupTestRunner(TestRunner, Command): - pass +from setuptools.command.develop import develop class BaseCommand(Command): - """A valid no-op command for setuptools & distutils.""" + """A minimal no-op setuptools command.""" - description = "A no-op command." - user_options = [] + description: str = "A no-op command." 
+ user_options: list = [] def initialize_options(self): pass @@ -48,75 +22,65 @@ def run(self): pass -class CleanSource(BaseCommand): - description = "clean orphaned pyc/pyo files from the source directory" - - def run(self): - for root_path, dir_names, file_names in os.walk("lib"): - for file_name in file_names: - if file_name.endswith("pyc") or file_name.endswith("pyo"): - compiled_path = os.path.join(root_path, file_name) - source_path = compiled_path[:-1] - if not os.path.exists(source_path): - print("Cleaning", compiled_path) - os.remove(compiled_path) - - -def copy_copyright(cmd, directory): - # Copy the COPYRIGHT information into the package root - iris_build_dir = os.path.join(directory, "iris") - for fname in ["COPYING", "COPYING.LESSER"]: - copyfile(fname, os.path.join(iris_build_dir, fname)) - +def custom_command(cmd, help=""): + """ + Factory function to generate a custom command that adds additional + behaviour to build the CF standard names module. -def build_std_names(cmd, directory): - # Call out to tools/generate_std_names.py to build std_names module. + """ - script_path = os.path.join("tools", "generate_std_names.py") - xml_path = os.path.join("etc", "cf-standard-name-table.xml") - module_path = os.path.join(directory, "iris", "std_names.py") - args = (sys.executable, script_path, xml_path, module_path) - cmd.spawn(args) + class CustomCommand(cmd): + description = help or cmd.description + def _build_std_names(self, directory): + # Call out to tools/generate_std_names.py to build std_names module. -def custom_cmd(command_to_override, functions, help_doc=""): + script_path = os.path.join("tools", "generate_std_names.py") + xml_path = os.path.join("etc", "cf-standard-name-table.xml") + module_path = os.path.join(directory, "iris", "std_names.py") + args = (sys.executable, script_path, xml_path, module_path) - """ - Allows command specialisation to include calls to the given functions. + self.spawn(args) - """ + def finalize_options(self): + # Execute the parent "cmd" class method. + cmd.finalize_options(self) - class ExtendedCommand(command_to_override): - description = help_doc or command_to_override.description + if ( + not hasattr(self, "editable_mode") + or self.editable_mode is None + ): + # Default to editable, i.e. applicable to the "std_names" + # and "develop" commands. + self.editable_mode = True def run(self): - # Run the original command first to make sure all the target - # directories are in place. - command_to_override.run(self) + # Execute the parent "cmd" class method. + cmd.run(self) + + # Determine the target root directory + if self.editable_mode: + # Pick the source dir instead (currently in the sub-dir "lib"). + target = "lib" + msg = "in-place" + else: + # Not editable - must be building. + target = self.build_lib + msg = "as-build" - # build_lib is defined if we are building the package. Otherwise - # we want to to the work in-place. - dest = getattr(self, "build_lib", None) - if dest is None: - print(" [Running in-place]") - # Pick the source dir instead (currently in the sub-dir "lib") - dest = "lib" + print(f"\n[Running {msg}]") - for func in functions: - func(self, dest) + # Build the CF standard names.
+ self._build_std_names(target) - return ExtendedCommand + return CustomCommand custom_commands = { - "test": SetupTestRunner, - "develop": custom_cmd(develop_cmd, [build_std_names]), - "build_py": custom_cmd(build_py, [build_std_names, copy_copyright]), - "std_names": custom_cmd( - BaseCommand, - [build_std_names], - help_doc="generate CF standard name module", + "develop": custom_command(develop), + "build_py": custom_command(build_py), + "std_names": custom_command( + BaseCommand, help="generate CF standard names" ), - "clean_source": CleanSource, } diff --git a/tools/update_lockfiles.py b/tools/update_lockfiles.py index 9d5705c7a7..f05210be87 100755 --- a/tools/update_lockfiles.py +++ b/tools/update_lockfiles.py @@ -53,7 +53,8 @@ 'lock', '--filename-template', ofile_template, '--file', infile, + '-k', 'explicit', '--platform', 'linux-64' ]) print(f"lockfile saved to {ofile_template}".format(platform='linux-64'), - file=sys.stderr) \ No newline at end of file + file=sys.stderr)
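
Note on the rewritten setup.py above: the new custom_command factory replaces the old custom_cmd/build_std_names pair with a single wrapper that regenerates lib/iris/std_names.py either in place (editable installs and the "std_names" command) or into build_lib (normal builds). The setup() call itself is outside this hunk, so the following wiring is an assumption, shown only as a minimal sketch:

    # Sketch only: assumes the custom_command/BaseCommand definitions and the
    # custom_commands mapping from the setup.py hunk above; the actual setup()
    # call is not part of this patch.
    from setuptools import setup

    setup(cmdclass=custom_commands)

    # With that wiring, "python setup.py std_names" regenerates
    # lib/iris/std_names.py in place, because finalize_options() above
    # defaults editable_mode to True for commands that do not define it.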
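
And on the final hunk: tools/update_lockfiles.py now passes "-k explicit" so that conda-lock emits the "# Generated by conda-lock" / @EXPLICIT package-URL format of the nox.lock files added earlier in this patch. A rough equivalent of what the script runs, and how the output is consumed, is sketched below; the paths are illustrative and conda-lock is assumed to be on PATH:

    # Hedged sketch, not part of the patch: regenerate one explicit lockfile
    # using the same flags the script passes to conda-lock.
    import subprocess

    subprocess.run(
        [
            "conda-lock", "lock",
            "--filename-template", "requirements/ci/nox.lock/py39-{platform}.lock",
            "--file", "requirements/ci/py39.yml",
            "-k", "explicit",      # emit the @EXPLICIT package-URL format
            "--platform", "linux-64",
        ],
        check=True,
    )

    # An @EXPLICIT file lists exact package URLs (see the lockfiles above),
    # so conda can install it directly, with no solver step:
    #   conda create --name iris-dev --file requirements/ci/nox.lock/py39-linux-64.lock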