diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000..4f67c422f8 --- /dev/null +++ b/.flake8 @@ -0,0 +1,49 @@ +[flake8] +# References: +# https://flake8.readthedocs.io/en/latest/user/configuration.html +# https://flake8.readthedocs.io/en/latest/user/error-codes.html +# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes + +max-line-length = 80 +max-complexity = 50 +select = C,E,F,W,B,B950 +ignore = + # E203: whitespace before ':' + E203, + # E226: missing whitespace around arithmetic operator + E226, + # E231: missing whitespace after ',', ';', or ':' + E231, + # E402: module level imports on one line + E402, + # E501: line too long + E501, + # E731: do not assign a lambda expression, use a def + E731, + # W503: line break before binary operator + W503, + # W504: line break after binary operator + W504, +exclude = + # + # ignore the following directories + # + .eggs, + build, + docs/src/sphinxext/*, + tools/*, + benchmarks/*, + # + # ignore auto-generated files + # + _ff_cross_refrences.py, + std_names.py, + um_cf_map.py, + # + # ignore third-party files + # + gitwash_dumper.py, + # + # convenience imports + # + lib/iris/common/__init__.py diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..f6cae020f3 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,20 @@ +# Format: numpy array format (#5235) +c18dcd8dafef0cc7bbbf80dfce66f76a46ce59c5 + +# style: flake8 (#3755) +7c86bc0168684345dc475457b1a77dadc77ce9bb + +# style: black (#3518) +ffcfad475e0593e1e40895453cf1df154e5f6f2c + +# style: isort (#4174) +15bbcc5ac3d539cb6e820148b66e7cf55d91c5d2 + +# style: blacken-docs (#4205) +1572e180243e492d8ff76fa8cdefb82ef6f90415 + +# style: sort-all (#4353) +64705dbc40881233aae45f051d96049150369e53 + +# style: codespell (#5186) +417aa6bbd9b10d25cad7def54d47ef4d718bc38d diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index 9ae3534c76..5be56c1d80 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -12,20 +12,24 @@ on: description: "Argument to be passed to the overnight benchmark script." 
required: false type: string + pull_request: + types: [labeled] jobs: benchmark: - if: "github.repository == 'SciTools/iris'" + if: > + github.repository == 'SciTools/iris' && + (github.event_name != 'pull_request' || + github.event.label.name == 'benchmark_this') runs-on: ubuntu-latest env: IRIS_TEST_DATA_LOC_PATH: benchmarks IRIS_TEST_DATA_PATH: benchmarks/iris-test-data - IRIS_TEST_DATA_VERSION: "2.18" + IRIS_TEST_DATA_VERSION: "2.19" # Lets us manually bump the cache to rebuild ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" - PY_VER: 3.8 steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it @@ -33,9 +37,9 @@ jobs: with: fetch-depth: 0 - - name: Install Nox + - name: Install ASV & Nox run: | - pip install nox + pip install asv nox - name: Cache environment directories id: cache-env-dir @@ -68,7 +72,14 @@ jobs: run: | echo "OVERRIDE_TEST_DATA_REPOSITORY=${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}/test_data" >> $GITHUB_ENV + - name: Benchmark this pull request + if: ${{ github.event.label.name == 'benchmark_this' }} + run: | + git checkout ${{ github.head_ref }} + python benchmarks/bm_runner.py branch origin/${{ github.base_ref }} + - name: Run overnight benchmarks + if: ${{ github.event_name != 'pull_request' }} run: | first_commit=${{ inputs.first_commit }} if [ "$first_commit" == "" ] @@ -78,10 +89,11 @@ jobs: if [ "$first_commit" != "" ] then - nox --session="benchmarks(overnight)" -- $first_commit + python benchmarks/bm_runner.py overnight $first_commit fi - name: Create issues for performance shifts + if: ${{ github.event_name != 'pull_request' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | diff --git a/.github/workflows/ci-citation b/.github/workflows/ci-citation new file mode 100644 index 0000000000..64bd16f8c5 --- /dev/null +++ b/.github/workflows/ci-citation @@ -0,0 +1,30 @@ +name: ci-citation + +on: + pull_request: + paths: + - "CITATION.cff" + + push: + paths: + - "CITATION.cff" + + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + validate: + name: "validate" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: "check CITATION.cff" + uses: citation-file-format/cffconvert-github-action@2.0.0 + with: + args: "--validate" diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml new file mode 100644 index 0000000000..dd017fd84b --- /dev/null +++ b/.github/workflows/ci-manifest.yml @@ -0,0 +1,26 @@ +# Reference +# - https://github.com/actions/checkout + +name: ci-manifest + +on: + pull_request: + branches: + - "*" + + push: + branches-ignore: + - "auto-update-lockfiles" + - "pre-commit-ci-update-config" + - "dependabot/*" + + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + manifest: + name: "check-manifest" + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.05.0 diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index cee98dc33d..5c48966ce8 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -35,18 +35,22 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest"] - python-version: ["3.10"] - session: ["tests", "doctest", "gallery", "linkcheck"] + python-version: ["3.11"] + session: ["doctest", "gallery", "linkcheck"] include: - os: "ubuntu-latest" - python-version: "3.9" + python-version: "3.11" + session: "tests" + coverage: "--coverage" + - os: 
"ubuntu-latest" + python-version: "3.10" session: "tests" - os: "ubuntu-latest" - python-version: "3.8" + python-version: "3.9" session: "tests" env: - IRIS_TEST_DATA_VERSION: "2.18" + IRIS_TEST_DATA_VERSION: "2.19" ENV_NAME: "ci-tests" steps: @@ -59,7 +63,7 @@ jobs: CACHE_WEEKS: 2 run: | echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV} - echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} + echo "LOCK_FILE=requirements/locks/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} - name: "data cache" uses: ./.github/workflows/composite/iris-data-cache @@ -107,7 +111,7 @@ jobs: - name: "nox cache" uses: ./.github/workflows/composite/nox-cache with: - cache_build: 0 + cache_build: 2 env_name: ${{ env.ENV_NAME }} lock_file: ${{ env.LOCK_FILE }} @@ -133,4 +137,8 @@ jobs: env: PY_VER: ${{ matrix.python-version }} run: | - nox --session ${{ matrix.session }} -- --verbose + nox --session ${{ matrix.session }} -- --verbose ${{ matrix.coverage }} + + - name: Upload coverage report + uses: codecov/codecov-action@v3 + if: ${{ matrix.coverage }} diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index a00833b118..942d528f6d 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -35,9 +35,7 @@ jobs: - name: "building" shell: bash run: | - # require build with explicit --sdist and --wheel in order to - # get correct version associated with sdist and bdist artifacts - pipx run build --sdist --wheel + pipx run build - uses: actions/upload-artifact@v3 with: @@ -54,7 +52,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10"] + python-version: ["3.9", "3.10", "3.11"] session: ["wheel"] env: ENV_NAME: "ci-wheels" @@ -74,7 +72,7 @@ jobs: CACHE_WEEKS: 2 run: | echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV} - echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} + echo "LOCK_FILE=requirements/locks/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} - name: "conda package cache" uses: ./.github/workflows/composite/conda-pkg-cache @@ -103,7 +101,7 @@ jobs: - name: "nox cache" uses: ./.github/workflows/composite/nox-cache with: - cache_build: 0 + cache_build: 1 env_name: ${{ env.ENV_NAME }} lock_file: ${{ env.LOCK_FILE }} @@ -132,7 +130,8 @@ jobs: name: "publish to test.pypi" runs-on: ubuntu-latest # upload to Test PyPI for every commit on main branch - if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' + # and check for the SciTools repo + if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' && github.repository_owner == 'SciTools' steps: - uses: actions/download-artifact@v3 with: @@ -152,7 +151,7 @@ jobs: name: "publish to pypi" runs-on: ubuntu-latest # upload to PyPI for every tag starting with 'v' - if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') && github.repository_owner == 'SciTools' steps: - uses: actions/download-artifact@v3 with: diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 94c20aedb9..4068d68ed8 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: 
refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@main + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.05.0 secrets: inherit diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 44b77e5c7d..203dc43b4e 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -14,7 +14,7 @@ jobs: if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest steps: - - uses: actions/stale@v7 + - uses: actions/stale@v8 with: repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 512fbab231..4d0b474e8a 100644 --- a/.gitignore +++ b/.gitignore @@ -77,4 +77,3 @@ docs/iris_image_test_output/ # Files generated during test runs. lib/iris/tests/results/**/*.dot -lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7c95eeaca3..3b8920c694 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,8 +28,15 @@ repos: # Don't commit to main branch. - id: no-commit-to-branch +- repo: https://github.com/codespell-project/codespell + rev: "v2.2.4" + hooks: + - id: codespell + types_or: [asciidoc, python, markdown, rst] + additional_dependencies: [tomli] + - repo: https://github.com/psf/black - rev: 23.1.0 + rev: 23.3.0 hooks: - id: black pass_filenames: false @@ -40,7 +47,6 @@ repos: hooks: - id: flake8 types: [file, python] - args: [--config=./setup.cfg] - repo: https://github.com/pycqa/isort rev: 5.12.0 diff --git a/.readthedocs.yml b/.readthedocs.yml index 95f828a873..af1a8f6303 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -19,7 +19,7 @@ build: - git stash pop conda: - environment: requirements/ci/readthedocs.yml + environment: requirements/readthedocs.yml sphinx: configuration: docs/src/conf.py @@ -29,5 +29,3 @@ python: install: - method: pip path: . - extra_requirements: - - docs diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 0000000000..bf5cc6bdbd --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,11 @@ + cff-version: 1.2.0 +message: "If Iris played an important part in your research then please add us to your reference list by using the references below." +title: "Iris" +authors: + - "The Iris development team" +abstract: "A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data" +license: "LGPL-3.0" +doi: "10.5281/zenodo.595182" +url: "http://scitools.org.uk/" +repository-code: "https://github.com/SciTools/iris" +type: "software" diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..bb040d21c5 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,133 @@ + +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or advances of + any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email address, + without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +scitools.pub@gmail.com. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html +[Mozilla CoC]: https://github.com/mozilla/diversity +[FAQ]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/MANIFEST.in b/MANIFEST.in index ad28df9c7c..329cf79c5d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,20 +1,30 @@ -# Top-level files -include CHANGES COPYING COPYING.LESSER prune .github +prune benchmarks +prune docs +prune etc +recursive-include lib *.cdl *.cml *.json *.md *.py *.template *.txt *.xml +prune requirements +recursive-include requirements *.txt +prune tools +exclude .flake8 +exclude .git-blame-ignore-revs +exclude .git_archival.txt +exclude .gitattributes exclude .gitignore +exclude .mailmap +exclude .pre-commit-config.yaml +exclude .readthedocs.yml +exclude CHANGES +exclude CODE_OF_CONDUCT.md +exclude codecov.yml +include COPYING +include COPYING.LESSER +exclude Makefile +exclude noxfile.py -# Files required for conda package management -recursive-include requirements * - -# Files required to build docs -recursive-include docs * -prune docs/src/_build -prune docs/src/generated -prune docs/gallery_tests - -# Files required to build std_names module -include tools/generate_std_names.py +# files required to build iris.std_names module include etc/cf-standard-name-table.xml +include tools/generate_std_names.py global-exclude *.py[cod] -global-exclude __pycache__ \ No newline at end of file +global-exclude __pycache__ diff --git a/Makefile b/Makefile index 74a87db427..0bb56edbf9 100755 --- a/Makefile +++ b/Makefile @@ -1,2 +1,2 @@ lockfiles: - python tools/update_lockfiles.py -o requirements/ci/nox.lock requirements/ci/py*.yml \ No newline at end of file + python tools/update_lockfiles.py -o requirements/locks requirements/py*.yml \ No newline at end of file diff --git a/README.md b/README.md index cdf4b2b043..53d24b0162 100644 --- a/README.md +++ b/README.md @@ -9,44 +9,16 @@ analysing and visualising Earth science data -

- (removed HTML badge block: ci-tests, Documentation Status, pre-commit.ci status, conda-forge, pypi, latest release, Commits since last release, zenodo, black, github discussions, twitter scitools_iris)

+| | | +|------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| ⚙️ CI | [![ci-manifest](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml) [![ci-tests](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml) [![ci-wheels](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml) [![pre-commit](https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg)](https://results.pre-commit.ci/latest/github/SciTools/iris/main) | +| 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) [![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | +| 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | +| 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | +| ✨ Meta | [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/COPYING.LESSER) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | +| 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | +| 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) [![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) 
[![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | +| |

For documentation see the diff --git a/benchmarks/README.md b/benchmarks/README.md index 8dffd473f3..316c8f9e32 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -11,13 +11,33 @@ shifts in performance being flagged in a new GitHub issue. ## Running benchmarks +On GitHub: a Pull Request can be benchmarked by adding the +https://github.com/SciTools/iris/labels/benchmark_this +label to the PR (to run a second time: just remove and re-add the label). +Note that a benchmark run could take an hour or more to complete. +This runs a comparison between the PR branch's ``HEAD`` and its merge-base with +the PR's base branch, thus showing performance differences introduced +by the PR. (This run is managed by +[the aforementioned GitHub Action](../.github/workflows/benchmark.yml)). + `asv ...` commands must be run from this directory. You will need to have ASV installed, as well as Nox (see [Benchmark environments](#benchmark-environments)). -[Iris' noxfile](../noxfile.py) includes a `benchmarks` session that provides -conveniences for setting up before benchmarking, and can also replicate the -automated overnight run locally. See the session docstring for detail. +The benchmark runner ([bm_runner.py](./bm_runner.py)) provides conveniences for +common benchmark setup and run tasks, including replicating the automated +overnight run locally. See `python bm_runner.py --help` for detail. + +A significant portion of benchmark run time is environment management. Run-time +can be reduced by placing the benchmark environment on the same file system as +your +[Conda package cache](https://conda.io/projects/conda/en/latest/user-guide/configuration/use-condarc.html#specify-pkg-directories), +if it is not already. You can achieve this by either: + +- Temporarily reconfiguring `delegated_env_commands` and `delegated_env_parent` + in [asv.conf.json](asv.conf.json) to reference a location on the same file + system as the Conda package cache. +- Moving your Iris repo to the same file system as the Conda package cache. ### Environment variables @@ -26,8 +46,8 @@ automated overnight run locally. See the session docstring for detail. benchmark scripts. * `DATA_GEN_PYTHON` - required - path to a Python executable that can be used to generate benchmark test objects/files; see -[Data generation](#data-generation). The Nox session sets this automatically, -but will defer to any value already set in the shell. +[Data generation](#data-generation). The benchmark runner sets this +automatically, but will defer to any value already set in the shell. * `BENCHMARK_DATA` - optional - path to a directory for benchmark synthetic test data, which the benchmark scripts will create if it doesn't already exist. Defaults to `/benchmarks/.data/` if not set. Note that some of @@ -36,7 +56,7 @@ plan accordingly. * `ON_DEMAND_BENCHMARKS` - optional - when set (to any value): benchmarks decorated with `@on_demand_benchmark` are included in the ASV run. Usually coupled with the ASV `--bench` argument to only run the benchmark(s) of -interest. Is set during the Nox `cperf` and `sperf` sessions. +interest. Is set during the benchmark runner `cperf` and `sperf` sub-commands. 
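
To make the roles of these variables concrete, here is a minimal, self-contained Python sketch of how a benchmark module might consume `ON_DEMAND_BENCHMARKS`, `BENCHMARK_DATA` and `DATA_GEN_PYTHON`. The helper names (`on_demand_benchmark`, `synthetic_data_path`, `generate_data`) are illustrative assumptions for this sketch only, not the actual Iris benchmark utilities.

```python
# Illustrative sketch only: the helpers below are invented for this example
# and are not the real Iris benchmark infrastructure.
import os
import subprocess
from pathlib import Path


def on_demand_benchmark(cls):
    """Keep a benchmark class only when ON_DEMAND_BENCHMARKS is set."""
    return cls if "ON_DEMAND_BENCHMARKS" in os.environ else None


def synthetic_data_path(filename: str) -> Path:
    """Resolve a synthetic data file under BENCHMARK_DATA (default: ./.data/)."""
    default = Path(__file__).parent / ".data"
    data_dir = Path(os.environ.get("BENCHMARK_DATA", default))
    data_dir.mkdir(parents=True, exist_ok=True)
    return data_dir / filename


def generate_data(script: Path) -> None:
    """Run a data-generation script with the DATA_GEN_PYTHON interpreter."""
    python = os.environ["DATA_GEN_PYTHON"]  # required, as described above
    subprocess.run([python, str(script)], check=True)
```
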
## Writing benchmarks diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index 7337eaa8c7..faa7f6daee 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -4,6 +4,7 @@ "project_url": "https://github.com/SciTools/iris", "repo": "..", "environment_type": "conda-delegated", + "conda_channels": ["conda-forge", "defaults"], "show_commit_url": "http://github.com/scitools/iris/commit/", "branches": ["upstream/main"], @@ -19,8 +20,7 @@ // * No build-time environment variables. // * Is run in the same environment as the ASV install itself. "delegated_env_commands": [ - "sed -i 's/_PY_VERSIONS_ALL/_PY_VERSION_LATEST/g' noxfile.py", - "nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" + "PY_VER=3.11 nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" ], // The parent directory of the above environment. // The most recently modified environment in the directory will be used. diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 3b2d77a80a..c5f8fb564e 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -50,7 +50,7 @@ def _make_region_cubes(self, full_mesh_cube): i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1) # flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...] i_faces = i_faces.flatten() - # reduce back to orignal length, wrap any overflows into valid range + # reduce back to original length, wrap any overflows into valid range i_faces = i_faces[:n_faces] % n_faces # Divide into regions -- always slightly uneven, since 7 doesn't divide diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index d3d128c7d8..e27b3b1996 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -46,7 +46,7 @@ def _make_region_cubes(self, full_mesh_cube): i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1) # flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...] i_faces = i_faces.flatten() - # reduce back to orignal length, wrap any overflows into valid range + # reduce back to original length, wrap any overflows into valid range i_faces = i_faces[:n_faces] % n_faces # Divide into regions -- always slightly uneven, since 7 doesn't divide diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py new file mode 100644 index 0000000000..f3efb0ea31 --- /dev/null +++ b/benchmarks/bm_runner.py @@ -0,0 +1,401 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Argparse conveniences for executing common types of benchmark runs. +""" + +from abc import ABC, abstractmethod +import argparse +from argparse import ArgumentParser +from datetime import datetime +from importlib import import_module +from os import environ +from pathlib import Path +import re +import subprocess +from tempfile import NamedTemporaryFile +from typing import Literal + +# The threshold beyond which shifts are 'notable'. See `asv compare`` docs +# for more. +COMPARE_FACTOR = 1.2 + +BENCHMARKS_DIR = Path(__file__).parent + +# Common ASV arguments for all run_types except `custom`. 
+ASV_HARNESS = ( + "run {posargs} --attribute rounds=4 --interleave-rounds --strict " + "--show-stderr" +) + + +def _subprocess_run_print(args, **kwargs): + # Use subprocess for printing to reduce chance of printing out of sequence + # with the subsequent calls. + subprocess.run(["echo", f"BM_RUNNER DEBUG: {' '.join(args)}"]) + return subprocess.run(args, **kwargs) + + +def _subprocess_run_asv(args, **kwargs): + args.insert(0, "asv") + kwargs["cwd"] = BENCHMARKS_DIR + return _subprocess_run_print(args, **kwargs) + + +def _check_requirements(package: str) -> None: + try: + import_module(package) + except ImportError as exc: + message = ( + f"No {package} install detected. Benchmarks can only " + f"be run in an environment including {package}." + ) + raise Exception(message) from exc + + +def _prep_data_gen_env() -> None: + """ + Create/access a separate, unchanging environment for generating test data. + """ + + root_dir = BENCHMARKS_DIR.parent + python_version = "3.11" + data_gen_var = "DATA_GEN_PYTHON" + if data_gen_var in environ: + print("Using existing data generation environment.") + else: + print("Setting up the data generation environment ...") + # Get Nox to build an environment for the `tests` session, but don't + # run the session. Will re-use a cached environment if appropriate. + _subprocess_run_print( + [ + "nox", + f"--noxfile={root_dir / 'noxfile.py'}", + "--session=tests", + "--install-only", + f"--python={python_version}", + ] + ) + # Find the environment built above, set it to be the data generation + # environment. + data_gen_python = next( + (root_dir / ".nox").rglob(f"tests*/bin/python{python_version}") + ).resolve() + environ[data_gen_var] = str(data_gen_python) + + print("Installing Mule into data generation environment ...") + mule_dir = data_gen_python.parents[1] / "resources" / "mule" + if not mule_dir.is_dir(): + _subprocess_run_print( + [ + "git", + "clone", + "https://github.com/metomi/mule.git", + str(mule_dir), + ] + ) + _subprocess_run_print( + [ + str(data_gen_python), + "-m", + "pip", + "install", + str(mule_dir / "mule"), + ] + ) + + print("Data generation environment ready.") + + +def _setup_common() -> None: + _check_requirements("asv") + _check_requirements("nox") + + _prep_data_gen_env() + + print("Setting up ASV ...") + _subprocess_run_asv(["machine", "--yes"]) + + print("Setup complete.") + + +def _asv_compare(*commits: str, overnight_mode: bool = False) -> None: + """Run through a list of commits comparing each one to the next.""" + commits = [commit[:8] for commit in commits] + shifts_dir = BENCHMARKS_DIR / ".asv" / "performance-shifts" + for i in range(len(commits) - 1): + before = commits[i] + after = commits[i + 1] + asv_command = ( + f"compare {before} {after} --factor={COMPARE_FACTOR} --split" + ) + _subprocess_run_asv(asv_command.split(" ")) + + if overnight_mode: + # Record performance shifts. + # Run the command again but limited to only showing performance + # shifts. + shifts = _subprocess_run_asv( + [*asv_command.split(" "), "--only-changed"], + capture_output=True, + text=True, + ).stdout + if shifts: + # Write the shifts report to a file. + # Dir is used by .github/workflows/benchmarks.yml, + # but not cached - intended to be discarded after run. 
+ shifts_dir.mkdir(exist_ok=True, parents=True) + shifts_path = (shifts_dir / after).with_suffix(".txt") + with shifts_path.open("w") as shifts_file: + shifts_file.write(shifts) + + +class _SubParserGenerator(ABC): + """Convenience for holding all the necessary argparse info in 1 place.""" + + name: str = NotImplemented + description: str = NotImplemented + epilog: str = NotImplemented + + def __init__(self, subparsers: ArgumentParser.add_subparsers) -> None: + self.subparser: ArgumentParser = subparsers.add_parser( + self.name, + description=self.description, + epilog=self.epilog, + formatter_class=argparse.RawTextHelpFormatter, + ) + self.add_arguments() + self.subparser.add_argument( + "asv_args", + nargs=argparse.REMAINDER, + help="Any number of arguments to pass down to ASV.", + ) + self.subparser.set_defaults(func=self.func) + + @abstractmethod + def add_arguments(self) -> None: + """All self.subparser.add_argument() calls.""" + _ = NotImplemented + + @staticmethod + @abstractmethod + def func(args: argparse.Namespace): + """ + The function to return when the subparser is parsed. + + `func` is then called, performing the user's selected sub-command. + + """ + _ = args + return NotImplemented + + +class Overnight(_SubParserGenerator): + name = "overnight" + description = ( + "Benchmarks all commits between the input **first_commit** to ``HEAD``, " + "comparing each to its parent for performance shifts. If a commit causes " + "shifts, the output is saved to a file:\n" + "``.asv/performance-shifts/``\n\n" + "Designed for checking the previous 24 hours' commits, typically in a " + "scheduled script." + ) + epilog = ( + "e.g. python bm_runner.py overnight a1b23d4\n" + "e.g. python bm_runner.py overnight a1b23d4 --bench=regridding" + ) + + def add_arguments(self) -> None: + self.subparser.add_argument( + "first_commit", + type=str, + help="The first commit in the benchmarking commit sequence.", + ) + + @staticmethod + def func(args: argparse.Namespace) -> None: + _setup_common() + + commit_range = f"{args.first_commit}^^.." + asv_command = ASV_HARNESS.format(posargs=commit_range) + _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) + + # git rev-list --first-parent is the command ASV uses. + git_command = f"git rev-list --first-parent {commit_range}" + commit_string = _subprocess_run_print( + git_command.split(" "), capture_output=True, text=True + ).stdout + commit_list = commit_string.rstrip().split("\n") + _asv_compare(*reversed(commit_list), overnight_mode=True) + + +class Branch(_SubParserGenerator): + name = "branch" + description = ( + "Performs the same operations as ``overnight``, but always on two commits " + "only - ``HEAD``, and ``HEAD``'s merge-base with the input " + "**base_branch**. Output from this run is never saved to a file. Designed " + "for testing if the active branch's changes cause performance shifts - " + "anticipating what would be caught by ``overnight`` once merged.\n\n" + "**For maximum accuracy, avoid using the machine that is running this " + "session. Run time could be >1 hour for the full benchmark suite.**" + ) + epilog = ( + "e.g. python bm_runner.py branch upstream/main\n" + "e.g. 
python bm_runner.py branch upstream/main --bench=regridding" + ) + + def add_arguments(self) -> None: + self.subparser.add_argument( + "base_branch", + type=str, + help="A branch that has the merge-base with ``HEAD`` - ``HEAD`` will be benchmarked against that merge-base.", + ) + + @staticmethod + def func(args: argparse.Namespace) -> None: + _setup_common() + + git_command = f"git merge-base HEAD {args.base_branch}" + merge_base = _subprocess_run_print( + git_command.split(" "), capture_output=True, text=True + ).stdout[:8] + + with NamedTemporaryFile("w") as hashfile: + hashfile.writelines([merge_base, "\n", "HEAD"]) + hashfile.flush() + commit_range = f"HASHFILE:{hashfile.name}" + asv_command = ASV_HARNESS.format(posargs=commit_range) + _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) + + _asv_compare(merge_base, "HEAD") + + +class _CSPerf(_SubParserGenerator, ABC): + """Common code used by both CPerf and SPerf.""" + + description = ( + "Run the on-demand {} suite of benchmarks (part of the UK Met " + "Office NG-VAT project) for the ``HEAD`` of ``upstream/main`` only, " + "and publish the results to the input **publish_dir**, within a " + "unique subdirectory for this run." + ) + epilog = ( + "e.g. python bm_runner.py {0} my_publish_dir\n" + "e.g. python bm_runner.py {0} my_publish_dir --bench=regridding" + ) + + def add_arguments(self) -> None: + self.subparser.add_argument( + "publish_dir", + type=str, + help="HTML results will be published to a sub-dir in this dir.", + ) + + @staticmethod + def csperf( + args: argparse.Namespace, run_type: Literal["cperf", "sperf"] + ) -> None: + _setup_common() + + publish_dir = Path(args.publish_dir) + if not publish_dir.is_dir(): + message = ( + f"Input 'publish directory' is not a directory: {publish_dir}" + ) + raise NotADirectoryError(message) + publish_subdir = ( + publish_dir + / f"{run_type}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" + ) + publish_subdir.mkdir() + + # Activate on demand benchmarks (C/SPerf are deactivated for + # 'standard' runs). + environ["ON_DEMAND_BENCHMARKS"] = "True" + commit_range = "upstream/main^!" + + asv_command = ( + ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" + ) + # C/SPerf benchmarks are much bigger than the CI ones: + # Don't fail the whole run if memory blows on 1 benchmark. + asv_command = asv_command.replace(" --strict", "") + # Only do a single round. + asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command) + _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) + + asv_command = f"publish {commit_range} --html-dir={publish_subdir}" + _subprocess_run_asv(asv_command.split(" ")) + + # Print completion message. + location = BENCHMARKS_DIR / ".asv" + print( + f'New ASV results for "{run_type}".\n' + f'See "{publish_subdir}",' + f'\n or JSON files under "{location / "results"}".' + ) + + +class CPerf(_CSPerf): + name = "cperf" + description = _CSPerf.description.format("CPerf") + epilog = _CSPerf.epilog.format("cperf") + + @staticmethod + def func(args: argparse.Namespace) -> None: + _CSPerf.csperf(args, "cperf") + + +class SPerf(_CSPerf): + name = "sperf" + description = _CSPerf.description.format("SPerf") + epilog = _CSPerf.epilog.format("sperf") + + @staticmethod + def func(args: argparse.Namespace) -> None: + _CSPerf.csperf(args, "sperf") + + +class Custom(_SubParserGenerator): + name = "custom" + description = ( + "Run ASV with the input **ASV sub-command**, without any preset " + "arguments - must all be supplied by the user. 
So just like running " + "ASV manually, with the convenience of re-using the runner's " + "scripted setup steps." + ) + epilog = "e.g. python bm_runner.py custom continuous a1b23d4 HEAD --quick" + + def add_arguments(self) -> None: + self.subparser.add_argument( + "asv_sub_command", + type=str, + help="The ASV command to run.", + ) + + @staticmethod + def func(args: argparse.Namespace) -> None: + _setup_common() + _subprocess_run_asv([args.asv_sub_command, *args.asv_args]) + + +def main(): + parser = ArgumentParser( + description="Run the Iris performance benchmarks (using Airspeed Velocity).", + epilog="More help is available within each sub-command.", + ) + subparsers = parser.add_subparsers(required=True) + + for gen in (Overnight, Branch, CPerf, SPerf, Custom): + _ = gen(subparsers).subparser + + parsed = parser.parse_args() + parsed.func(parsed) + + +if __name__ == "__main__": + main() diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000..a0efbb9997 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,9 @@ +coverage: + # see https://docs.codecov.com/docs/commit-status + status: + project: + default: + target: auto + # coverage can drop by up to % while still posting success + threshold: 3% + patch: off diff --git a/docs/Makefile b/docs/Makefile index fcb0ec0116..b6f52f58f9 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,39 +1,29 @@ SUBDIRS = src +help: + @for i in $(SUBDIRS); do \ + echo "make help in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) help); done + html: @for i in $(SUBDIRS); do \ - echo "make html in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html); done + echo "make html in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html); done html-noplot: @for i in $(SUBDIRS); do \ - echo "make html-noplot in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-noplot); done + echo "make html-noplot in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-noplot); done html-noapi: @for i in $(SUBDIRS); do \ - echo "make html-noapi in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-noapi); done + echo "make html-noapi in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-noapi); done html-quick: @for i in $(SUBDIRS); do \ - echo "make html-quick in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-quick); done - -all: - @for i in $(SUBDIRS); do \ - echo "make all in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) all); done - -install: - @for i in $(SUBDIRS); do \ - echo "Installing in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) install); done - -build: - @for i in $(SUBDIRS); do \ - echo "Clearing in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) build); done + echo "make html-quick in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-quick); done clean: @for i in $(SUBDIRS); do \ @@ -42,8 +32,8 @@ clean: doctest: @for i in $(SUBDIRS); do \ - echo "Running doctest in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) doctest); done + echo "Running doctest in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) doctest); done linkcheck: @for i in $(SUBDIRS); do \ @@ -55,3 +45,7 @@ show: echo "Running show in $$i..."; \ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) show); done +livehtml: + @for i in $(SUBDIRS); do \ + echo "Running show in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) livehtml); done \ No newline at end of file diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py 
index 5fba3669b6..6ef6075fb3 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -72,7 +72,7 @@ def main(): # Make an aggregator from the user function. SPELL_COUNT = Aggregator( - "spell_count", count_spells, units_func=lambda units: 1 + "spell_count", count_spells, units_func=lambda units, **kwargs: 1 ) # Define the parameters of the test. diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 2cf42e66e0..c4254ad544 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -78,7 +78,7 @@ def make_plot(projection_name, projection_crs): y_points = y_lower + y_delta * np.concatenate( (zeros, steps, ones, steps[::-1]) ) - # Get the Iris coordinate sytem of the X coordinate (Y should be the same). + # Get the Iris coordinate system of the X coordinate (Y should be the same). cs_data1 = x_coord.coord_system # Construct an equivalent Cartopy coordinate reference system ("crs"). crs_data1 = cs_data1.as_cartopy_crs() diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index 8a0c80c707..30975a4828 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -40,7 +40,7 @@ def main(): plt.gca().coastlines() iplt.show() - # Plot #3: Contourf overlayed by coloured point data + # Plot #3: Contourf overlaid by coloured point data plt.figure() qplt.contourf(air_pressure) iplt.points(air_pressure, c=air_pressure.data) diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index e15aa0e6ef..0639c7ac1d 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -5,16 +5,16 @@ This example demonstrates the loading of a lagged ensemble dataset from the GloSea4 model, which is then used to produce two types of plot: - * The first shows the "postage stamp" style image with an array of 14 images, - one for each ensemble member with a shared colorbar. (The missing image in - this example represents ensemble member number 6 which was a failed run) +* The first shows the "postage stamp" style image with an array of 14 images, + one for each ensemble member with a shared colorbar. (The missing image in + this example represents ensemble member number 6 which was a failed run) - * The second plot shows the data limited to a region of interest, in this case - a region defined for forecasting ENSO (El Nino-Southern Oscillation), which, - for the purposes of this example, has had the ensemble mean subtracted from - each ensemble member to give an anomaly surface temperature. In practice a - better approach would be to take the climatological mean, calibrated to the - model, from each ensemble member. +* The second plot shows the data limited to a region of interest, in this case + a region defined for forecasting ENSO (El Nino-Southern Oscillation), which, + for the purposes of this example, has had the ensemble mean subtracted from + each ensemble member to give an anomaly surface temperature. In practice a + better approach would be to take the climatological mean, calibrated to the + model, from each ensemble member. """ @@ -115,7 +115,7 @@ def main(): # Get the time for the entire plot. 
time = last_time_coord.units.num2date(last_time_coord.bounds[0, 0]) - # Set a global title for the postage stamps with the date formated by + # Set a global title for the postage stamps with the date formatted by # "monthname year". time_string = time.strftime("%B %Y") plt.suptitle(f"Surface temperature ensemble forecasts for {time_string}") diff --git a/docs/src/IEP/IEP001.adoc b/docs/src/IEP/IEP001.adoc index d38b2e8478..e43969f3ce 100644 --- a/docs/src/IEP/IEP001.adoc +++ b/docs/src/IEP/IEP001.adoc @@ -119,7 +119,7 @@ cube.sel(height=1.5) The semantics of position-based slices will continue to match that of normal Python slices. The start position is included, the end position is excluded. -Value-based slices will be stricly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas. +Value-based slices will be strictly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas. Just as for normal Python slices, we do not need to provide the ability to control the include/exclude behaviour for slicing. diff --git a/docs/src/Makefile b/docs/src/Makefile index a75da5371b..8d652878f6 100644 --- a/docs/src/Makefile +++ b/docs/src/Makefile @@ -20,27 +20,18 @@ ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . help: @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - @echo " show to open the built documentation in the default browser" - -clean: - -rm -rf $(BUILDDIR) - -rm -rf $(SRCDIR)/generated + @echo " help to view this help" + @echo " html to make standalone HTML files" + @echo " html-noplot to make standalone HTML files, skip gallery" + @echo " html-noapi to make standalone HTML files, skip the API" + @echo " html-quick to make standalone HTML files, skip the gallery and API" + @echo " clean to remove all built files" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " linkcheck to check all external links for integrity" + @echo " show to open the built documentation in the default browser" + @echo " livehtml to auto build the docs when any file changes are detected." + @echo " You need to install sphinx-autobuild first:" + @echo " conda install -c conda-forge sphinx-autobuild" html: $(SPHINXBUILD) $(WARNING_TO_ERROR) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @@ -62,94 +53,23 @@ html-quick: @echo @echo "Build finished. The HTML (no gallery or api docs) pages are in $(BUILDDIR)/html" -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/dirhtml" - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml" - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files" - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files" - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Iris.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Iris.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/Iris" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Iris" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub" - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: latex - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - make -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex" - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man" +clean: + -rm -rf $(BUILDDIR) + -rm -rf $(SRCDIR)/generated -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " + @echo "results in $(BUILDDIR)/doctest/output.txt." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." + @echo "Link check complete; look for any errors in the above output " + @echo "or in $(BUILDDIR)/linkcheck/output.txt." 
show: @python -c "import webbrowser; webbrowser.open_new_tab('file://$(shell pwd)/$(BUILDDIR)/html/index.html')" +livehtml: + sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) --ignore generated $(O) \ No newline at end of file diff --git a/docs/src/_static/icon_api.svg b/docs/src/_static/icon_api.svg index 841b105973..bf2f8d67bb 100644 --- a/docs/src/_static/icon_api.svg +++ b/docs/src/_static/icon_api.svg @@ -13,14 +13,15 @@ id="Capa_1" x="0px" y="0px" - viewBox="0 0 511 511" - style="enable-background:new 0 0 511 511;" + viewBox="0 0 508 511" xml:space="preserve" - sodipodi:docname="icon_api2.svg" - inkscape:version="0.92.4 (5da689c313, 2019-01-14)">image/svg+xml + inkscape:current-layer="g20" + inkscape:pagecheckerboard="true" /> + style="fill:#1b8fb7;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#1b8fb7;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#1b8fb7;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#1b8fb7;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#1b8fb7;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#cadc6d;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#cadc6d;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#cadc6d;fill-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#cadc6d;fill-opacity:1" + inkscape:connector-curvature="0" /> + \ No newline at end of file diff --git a/docs/src/_static/iris-logo-title.svg b/docs/src/_static/iris-logo-title.svg index 5bc38bfbda..98dd1a73d5 100644 --- a/docs/src/_static/iris-logo-title.svg +++ b/docs/src/_static/iris-logo-title.svg @@ -1,13 +1,13 @@ -


- - -""" - - -def lookup_object_type(obj): - if inspect.isclass(obj): - return "class" - elif inspect.isfunction(obj): - return "function" - else: - return None - - -def auto_doc_module( - file_path, import_name, root_package, package_toc=None, title=None -): - doc = r""".. _{import_name}: - -{title_underline} -{title} -{title_underline} - -{sidebar} - -.. currentmodule:: {root_package} - -.. automodule:: {import_name} - -In this module: - -{module_elements} - -""" - - if package_toc: - sidebar = """ -{package_toc_tree} - - """.format( - package_toc_tree=package_toc - ) - else: - sidebar = "" - - try: - mod = __import__(import_name) - except ImportError as e: - message = r""".. error:: - - This module could not be imported. Some dependencies are missing:: - - """ + str( - e - ) - return doc.format( - title=title or import_name, - title_underline="=" * len(title or import_name), - import_name=import_name, - root_package=root_package, - sidebar=sidebar, - module_elements=message, - ) - - mod = sys.modules[import_name] - elems = dir(mod) - - if "__all__" in elems: - document_these = [ - (attr_name, getattr(mod, attr_name)) for attr_name in mod.__all__ - ] - else: - document_these = [ - (attr_name, getattr(mod, attr_name)) - for attr_name in elems - if ( - not attr_name.startswith("_") - and not inspect.ismodule(getattr(mod, attr_name)) - ) - ] - - def is_from_this_module(arg): - # name = arg[0] - obj = arg[1] - return ( - hasattr(obj, "__module__") and obj.__module__ == mod.__name__ - ) - - sort_order = {"class": 2, "function": 1} - - # Sort them according to sort_order dict. - def sort_key(arg): - # name = arg[0] - obj = arg[1] - return sort_order.get(lookup_object_type(obj), 0) - - document_these = filter(is_from_this_module, document_these) - document_these = sorted(document_these, key=sort_key) - - lines = [] - for element, obj in document_these: - object_name = import_name + "." + element - obj_content = document_dict[lookup_object_type(obj)].format( - object_name=object_name, - object_name_header_line="+" * len(object_name), - object_docstring=inspect.getdoc(obj), - ) - lines.append(obj_content) - - lines = horizontal_sep.join(lines) - - module_elements = "\n".join( - " * :py:obj:`{}`".format(element) for element, obj in document_these - ) - - lines = doc + lines - return lines.format( - title=title or import_name, - title_underline="=" * len(title or import_name), - import_name=import_name, - root_package=root_package, - sidebar=sidebar, - module_elements=module_elements, - ) - - -def auto_doc_package(file_path, import_name, root_package, sub_packages): - max_depth = 1 if import_name == "iris" else 2 - package_toc = "\n ".join(sub_packages) - - package_toc = """ - .. toctree:: - :maxdepth: {:d} - :titlesonly: - :hidden: - - {} - - -""".format( - max_depth, package_toc - ) - - if "." in import_name: - title = None - else: - title = import_name.capitalize() + " API" - - return auto_doc_module( - file_path, - import_name, - root_package, - package_toc=package_toc, - title=title, - ) - - -def auto_package_build(app): - root_package = app.config.autopackage_name - if root_package is None: - raise ValueError( - "set the autopackage_name variable in the " "conf.py file" - ) - - if not isinstance(root_package, list): - raise ValueError( - "autopackage was expecting a list of packages to " - 'document e.g. ["itertools"]' - ) - - for package in root_package: - do_package(package) - - -def do_package(package_name): - out_dir = "generated/api" + os.path.sep - - # Import the root package. 
If this fails then an import error will be - # raised. - module = __import__(package_name) - root_package = package_name - rootdir = os.path.dirname(module.__file__) - - package_folder = [] - module_folders = {} - - for root, subFolders, files in os.walk(rootdir): - for fname in files: - name, ext = os.path.splitext(fname) - - # Skip some non-relevant files. - if ( - fname.startswith(".") - or fname.startswith("#") - or re.search("^_[^_]", fname) - or fname.find(".svn") >= 0 - or not (ext in [".py", ".so"]) - ): - continue - - # Handle new shared library naming conventions - if ext == ".so": - name = name.split(".", 1)[0] - - rel_path = ( - root_package + os.path.join(root, fname).split(rootdir)[-1] - ) - mod_folder = root_package + os.path.join(root).split(rootdir)[ - -1 - ].replace("/", ".") - - # Only add this package to folder list if it contains an __init__ - # script. - if name == "__init__": - package_folder.append([mod_folder, rel_path]) - else: - import_name = mod_folder + "." + name - mf_list = module_folders.setdefault(mod_folder, []) - mf_list.append((import_name, rel_path)) - if not os.path.exists(out_dir): - os.makedirs(out_dir) - - for package, package_path in package_folder: - if "._" in package or "test" in package: - continue - - paths = [] - for spackage, spackage_path in package_folder: - # Ignore this packages, packages that are not children of this - # one, test packages, private packages, and packages that are - # subpackages of subpackages (they'll be part of the subpackage). - if spackage == package: - continue - if not spackage.startswith(package): - continue - if spackage.count(".") > package.count(".") + 1: - continue - if "test" in spackage: - continue - - split_path = spackage.rsplit(".", 2)[-2:] - if any(part[0] == "_" for part in split_path): - continue - - paths.append(os.path.join(*split_path) + ".rst") - - paths.extend( - os.path.join( - os.path.basename(os.path.dirname(path)), - os.path.basename(path).split(".", 1)[0], - ) - for imp_name, path in module_folders.get(package, []) - ) - - paths.sort() - excluded_paths = [item[0] for item in exclude_modules] - - # check for any modules to exclude - for excluded_path in excluded_paths: - if excluded_path in paths: - autolog(f"Excluding module in package: {excluded_path!r}") - paths.remove(excluded_path) - - doc = auto_doc_package(package_path, package, root_package, paths) - - package_dir = out_dir + package.replace(".", os.path.sep) - if not os.path.exists(package_dir): - os.makedirs(out_dir + package.replace(".", os.path.sep)) - - out_path = package_dir + ".rst" - if not os.path.exists(out_path): - autolog("Creating {} ...".format(out_path)) - with open(out_path, "w") as fh: - fh.write(doc) - else: - with open(out_path, "r") as fh: - existing_content = "".join(fh.readlines()) - if doc != existing_content: - autolog("Creating {} ...".format(out_path)) - with open(out_path, "w") as fh: - fh.write(doc) - - excluded_imports = [item[1] for item in exclude_modules] - - for import_name, module_path in module_folders.get(package, []): - # check for any modules to exclude - if import_name in excluded_imports: - autolog(f"Excluding module file: {import_name!r}") - continue - doc = auto_doc_module( - module_path, import_name, root_package - ) - out_path = ( - out_dir - + import_name.replace(".", os.path.sep) - + ".rst" - ) - if not os.path.exists(out_path): - autolog("Creating {} ...".format(out_path)) - with open(out_path, "w") as fh: - fh.write(doc) - else: - with open(out_path, "r") as fh: - existing_content = 
"".join(fh.readlines()) - if doc != existing_content: - autolog("Creating {} ...".format(out_path)) - with open(out_path, "w") as fh: - fh.write(doc) - - -def setup(app): - app.connect("builder-inited", auto_package_build) - app.add_config_value("autopackage_name", None, "env") diff --git a/docs/src/userguide/citation.rst b/docs/src/userguide/citation.rst index 1498b9dfe1..62af43c94f 100644 --- a/docs/src/userguide/citation.rst +++ b/docs/src/userguide/citation.rst @@ -15,12 +15,12 @@ For example:: @manual{Iris, author = {{Met Office}}, - title = {Iris: A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data }, - edition = {v3.4}, - year = {2010 - 2022}, - address = {Exeter, Devon }, + title = {Iris: A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data}, + edition = {v3.6}, + year = {2010 - 2023}, + address = {Exeter, Devon}, url = {http://scitools.org.uk/}, - doi = {10.5281/zenodo.7386117} + doi = {10.5281/zenodo.7948293} } @@ -34,7 +34,7 @@ Suggested format:: For example:: - Iris. v3.4. 1-Dec-2022. Met Office. UK. https://doi.org/10.5281/zenodo.7386117 22-12-2022 + Iris. v3.5. 27-Apr-2023. Met Office. UK. https://doi.org/10.5281/zenodo.7871017 22-12-2022 ******************** diff --git a/docs/src/userguide/concat.png b/docs/src/userguide/concat.png deleted file mode 100644 index eb3d84046e..0000000000 Binary files a/docs/src/userguide/concat.png and /dev/null differ diff --git a/docs/src/userguide/concat.svg b/docs/src/userguide/concat.svg index 0234b37bfa..f32fc0030b 100644 --- a/docs/src/userguide/concat.svg +++ b/docs/src/userguide/concat.svg @@ -9,11 +9,11 @@ xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" - width="750" - height="250" + width="772.70679" + height="285.20804" id="svg2834" version="1.1" - inkscape:version="0.47 r22583" + inkscape:version="0.92.4 (5da689c313, 2019-01-14)" sodipodi:docname="concat.svg" inkscape:export-xdpi="90" inkscape:export-ydpi="90"> @@ -98,7 +98,8 @@ id="path3666" style="font-size:12px;fill-rule:evenodd;stroke-width:0.625;stroke-linejoin:round" d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z" - transform="matrix(-1.1,0,0,-1.1,-1.1,0)" /> + transform="matrix(-1.1,0,0,-1.1,-1.1,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="scale(-0.8)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,10,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + 
style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + borderlayer="true" + fit-margin-top="20" + fit-margin-left="20" + fit-margin-bottom="20" + fit-margin-right="20"> + snapvisiblegridlinesonly="true" + originx="16.413907" + originy="19.718628" /> @@ -443,7 +456,7 @@ image/svg+xml - + @@ -451,10 +464,10 @@ id="layer1" inkscape:label="Layer 1" inkscape:groupmode="layer" - transform="translate(-34.602633,-28.380468)"> + transform="translate(-18.188725,-12.891066)"> x + style="font-size:18.91116524px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">x @@ -471,19 +484,22 @@ inkscape:path-effect="#path-effect3640" id="path3638" d="M 60,420 320,260" - style="fill:none;stroke:#000000;stroke-width:1.42801106px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" /> + style="fill:none;stroke:#000000;stroke-width:1.42801106px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 V 50" + style="fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 H 480" + style="fill:none;stroke:#000000;stroke-width:1.16397536px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:1.42801106px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 V 50" + style="fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 H 480" + style="fill:none;stroke:#000000;stroke-width:1.16397536px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 60.562629,212.61178 H 54.65289" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 60.562629,165.33386 c -5.909739,0 -5.909739,0 -5.909739,0" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 60.562629,118.05595 c -5.909739,0 -5.909739,0 -5.909739,0" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 458.87904,212.61178 H 452.9693" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 458.87904,165.33386 H 452.9693" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 458.87904,118.05595 H 452.9693" + inkscape:connector-curvature="0" /> 0 + style="font-size:18.91116524px;line-height:1.25">0 + inkscape:original-d="m 60.562629,259.88968 c -5.909739,0 -5.909739,0 -5.909739,0 v 0" + inkscape:connector-curvature="0" /> 1 + style="font-size:18.91116524px;line-height:1.25">1 2 + style="font-size:18.91116524px;line-height:1.25">2 3 + style="font-size:18.91116524px;line-height:1.25">3 0 + style="font-size:18.91116524px;line-height:1.25">0 1 + style="font-size:18.91116524px;line-height:1.25">1 2 + style="font-size:18.91116524px;line-height:1.25">2 3 + style="font-size:18.91116524px;line-height:1.25">3 + inkscape:original-d="M 458.87904,259.88968 H 452.9693" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 480.51092,212.61178 600.71279,141.69491 H 770.08815 Z" + sodipodi:nodetypes="cccc" + 
inkscape:connector-curvature="0" /> + inkscape:original-d="M 480.51092,118.05595 600.71279,47.139078 H 770.08815 L 677.2049,118.05595 Z" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 481.40069,118.38047 V 212.9363 H 664.60263 V 118.38047 Z" + sodipodi:nodetypes="ccccc" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 664.60263,212.9363 770.08815,141.69491 V 47.139078 L 664.60263,118.38047 Z" + sodipodi:nodetypes="ccccc" + inkscape:connector-curvature="0" /> t + style="font-size:18.91116524px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">t y + style="font-size:18.91116524px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">y t + style="font-size:18.91116524px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">t y + style="font-size:18.91116524px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">y + inkscape:original-d="m 386.84486,142.01943 v 11.81948 h 23.63896 v 5.90973 l 17.72922,-11.81947 -17.72922,-11.81948 v 5.90974 z" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 144.54556,118.38048 262.74034,41.553863 h -59.09738" + sodipodi:nodetypes="ccc" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 262.74034,212.9363 V 118.38047 L 380.93512,41.553863 v 94.555827 z" + sodipodi:nodetypes="ccccc" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 86.176265,213.75562 85.448171,118.38048 203.64296,41.553863" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 262.74034,41.553863 H 380.93512" + sodipodi:nodetypes="cc" + inkscape:connector-curvature="0" /> + sodipodi:nodetypes="cc" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 84.558085,118.283 c 0.890086,0.0975 178.182255,0.0975 178.182255,0.0975 v 94.55582 l -176.564075,0.81932 v 0" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 144.54556,118.38048 V 212.9363" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 203.76402,118.82239 -0.12106,94.11391" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 85.636872,117.7436 -0.188701,95.1927" + inkscape:connector-curvature="0" /> diff --git a/docs/src/userguide/cube_diagram.dia b/docs/src/userguide/cube_diagram.dia deleted file mode 100644 index 8edc611782..0000000000 Binary files a/docs/src/userguide/cube_diagram.dia and /dev/null differ diff --git a/docs/src/userguide/cube_diagram.png b/docs/src/userguide/cube_diagram.png deleted file mode 100644 index 80f5328c3b..0000000000 Binary files a/docs/src/userguide/cube_diagram.png and /dev/null differ diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst index 08297c2a51..fb389a5229 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -14,7 +14,7 @@ Cube Statistics Collapsing Entire Data Dimensions --------------------------------- -.. testsetup:: +.. 
testsetup:: collapsing import iris filename = iris.sample_data_path('uk_hires.pp') @@ -86,7 +86,7 @@ we can pass the coordinate name and the aggregation definition to the model_level_number 10, bound=(1, 19) sigma 0.92292976, bound=(0.8458596, 1.0) Cell methods: - mean model_level_number + 0 model_level_number: mean Attributes: STASH m01s00i004 source 'Data from Met Office Unified Model' @@ -125,7 +125,7 @@ in order to calculate the area of the grid boxes:: These areas can now be passed to the ``collapsed`` method as weights: -.. doctest:: +.. doctest:: collapsing >>> new_cube = cube.collapsed(['grid_longitude', 'grid_latitude'], iris.analysis.MEAN, weights=grid_areas) >>> print(new_cube) @@ -141,11 +141,11 @@ These areas can now be passed to the ``collapsed`` method as weights: altitude - x Scalar coordinates: forecast_reference_time 2009-11-19 04:00:00 - grid_latitude 1.5145501 degrees, bound=(0.14430022, 2.8848) degrees - grid_longitude 358.74948 degrees, bound=(357.494, 360.00497) degrees + grid_latitude 1.5145501 degrees, bound=(0.13755022, 2.89155) degrees + grid_longitude 358.74948 degrees, bound=(357.48724, 360.01172) degrees surface_altitude 399.625 m, bound=(-14.0, 813.25) m Cell methods: - mean grid_longitude, grid_latitude + 0 grid_longitude: grid_latitude: mean Attributes: STASH m01s00i004 source 'Data from Met Office Unified Model' @@ -155,6 +155,50 @@ Several examples of area averaging exist in the gallery which may be of interest including an example on taking a :ref:`global area-weighted mean `. +In addition to plain arrays, weights can also be given as cubes or (names of) +:meth:`~iris.cube.Cube.coords`, :meth:`~iris.cube.Cube.cell_measures`, or +:meth:`~iris.cube.Cube.ancillary_variables`. +This has the advantage of correct unit handling, e.g., for area-weighted sums +the units of the resulting cube are multiplied by an area unit: + +.. doctest:: collapsing + + >>> from iris.coords import CellMeasure + >>> cell_areas = CellMeasure( + ... grid_areas, + ... standard_name='cell_area', + ... units='m2', + ... measure='area', + ... ) + >>> cube.add_cell_measure(cell_areas, (0, 1, 2, 3)) + >>> area_weighted_sum = cube.collapsed( + ... ['grid_longitude', 'grid_latitude'], + ... iris.analysis.SUM, + ... weights='cell_area' + ... ) + >>> print(area_weighted_sum) + air_potential_temperature / (m2.K) (time: 3; model_level_number: 7) + Dimension coordinates: + time x - + model_level_number - x + Auxiliary coordinates: + forecast_period x - + level_height - x + sigma - x + Derived coordinates: + altitude - x + Scalar coordinates: + forecast_reference_time 2009-11-19 04:00:00 + grid_latitude 1.5145501 degrees, bound=(0.13755022, 2.89155) degrees + grid_longitude 358.74948 degrees, bound=(357.48724, 360.01172) degrees + surface_altitude 399.625 m, bound=(-14.0, 813.25) m + Cell methods: + 0 grid_longitude: grid_latitude: sum + Attributes: + STASH m01s00i004 + source 'Data from Met Office Unified Model' + um_version '7.3' + .. _cube-statistics-aggregated-by: Partially Reducing Data Dimensions @@ -232,7 +276,7 @@ Printing this cube now shows that two extra coordinates exist on the cube: Scalar coordinates: forecast_period 0 hours Cell methods: - mean month, year + 0 month: year: mean Attributes: Conventions 'CF-1.5' STASH m01s00i024 @@ -338,3 +382,44 @@ from jja-2006 to jja-2010: mam 2010 jja 2010 +Moreover, :meth:`Cube.aggregated_by ` supports +weighted aggregation. 
+For example, this is helpful for an aggregation over a monthly time +coordinate that consists of months with different numbers of days. +Similar to :meth:`Cube.collapsed `, weights can be +given as arrays, cubes, or as (names of) :meth:`~iris.cube.Cube.coords`, +:meth:`~iris.cube.Cube.cell_measures`, or +:meth:`~iris.cube.Cube.ancillary_variables`. +When weights are not given as arrays, units are correctly handled for weighted +sums, i.e., the original unit of the cube is multiplied by the units of the +weights. +The following example shows a weighted sum (notice the change of the units): + +.. doctest:: aggregation + + >>> from iris.coords import AncillaryVariable + >>> time_weights = AncillaryVariable( + ... cube.coord("time").bounds[:, 1] - cube.coord("time").bounds[:, 0], + ... long_name="Time Weights", + ... units="hours", + ... ) + >>> cube.add_ancillary_variable(time_weights, 0) + >>> seasonal_sum = cube.aggregated_by("clim_season", iris.analysis.SUM, weights="Time Weights") + >>> print(seasonal_sum) + surface_temperature / (3600 s.K) (-- : 4; latitude: 18; longitude: 432) + Dimension coordinates: + latitude - x - + longitude - - x + Auxiliary coordinates: + clim_season x - - + forecast_reference_time x - - + season_year x - - + time x - - + Scalar coordinates: + forecast_period 0 hours + Cell methods: + 0 month: year: mean + 1 clim_season: sum + Attributes: + Conventions 'CF-1.5' + STASH m01s00i024 diff --git a/docs/src/userguide/glossary.rst b/docs/src/userguide/glossary.rst index 818ef0c7ad..5c24f03372 100644 --- a/docs/src/userguide/glossary.rst +++ b/docs/src/userguide/glossary.rst @@ -1,3 +1,5 @@ +.. include:: ../common_links.inc + .. _glossary: Glossary @@ -125,7 +127,7 @@ Glossary of formats. | **Related:** :term:`CartoPy` **|** :term:`NumPy` - | **More information:** `Matplotlib `_ + | **More information:** `matplotlib`_ | Metadata @@ -143,9 +145,11 @@ Glossary When Iris loads this format, it also especially recognises and interprets data encoded according to the :term:`CF Conventions`. + __ `NetCDF4`_ + | **Related:** :term:`Fields File (FF) Format` **|** :term:`GRIB Format` **|** :term:`Post Processing (PP) Format` - | **More information:** `NetCDF-4 Python Git `_ + | **More information:** `NetCDF-4 Python Git`__ | NumPy diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index deae4427ed..cba7d778d5 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -75,8 +75,8 @@ Let's take the air temperature cube we've seen previously: pressure 1000.0 hPa time 1998-12-01 00:00:00, bound=(1994-12-01 00:00:00, 1998-12-01 00:00:00) Cell methods: - mean within years time - mean over years time + 0 time: mean within years + 1 time: mean over years Attributes: STASH m01s16i203 source 'Data from Met Office Unified Model' @@ -94,8 +94,8 @@ We can interpolate specific values from the coordinates of the cube: pressure 1000.0 hPa time 1998-12-01 00:00:00, bound=(1994-12-01 00:00:00, 1998-12-01 00:00:00) Cell methods: - mean within years time - mean over years time + 0 time: mean within years + 1 time: mean over years Attributes: STASH m01s16i203 source 'Data from Met Office Unified Model' diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index 29d8f3cefc..267f97b0fc 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -10,9 +10,7 @@ metadata about a phenomenon. 
In Iris, a cube is an interpretation of the *Climate and Forecast (CF) Metadata Conventions* whose purpose is to: -.. panels:: - :container: container-lg pb-3 - :column: col-lg-12 p-2 +.. card:: *require conforming datasets to contain sufficient metadata that they are self-describing... including physical units if appropriate, and that each @@ -104,7 +102,7 @@ Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at 4 different longitudes, 2 different latitudes and 3 different heights. Our data array can be represented pictorially: -.. image:: multi_array.png +.. image:: multi_array.svg Where dimensions 0, 1, and 2 have lengths 3, 2 and 4 respectively. @@ -134,7 +132,7 @@ The Iris cube to represent this data would consist of: Pictorially the cube has taken on more information than a simple array: -.. image:: multi_array_to_cube.png +.. image:: multi_array_to_cube.svg Additionally further information may be optionally attached to the cube. diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst index 33ad932d70..b71f033c30 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -234,7 +234,7 @@ A single cube is loaded in the following example:: longitude - x ... Cell methods: - mean time + 0 time: mean However, when attempting to load data which would result in anything other than one cube, an exception is raised:: diff --git a/docs/src/userguide/merge.png b/docs/src/userguide/merge.png deleted file mode 100644 index cafaa370da..0000000000 Binary files a/docs/src/userguide/merge.png and /dev/null differ diff --git a/docs/src/userguide/merge.svg b/docs/src/userguide/merge.svg index 9326bc332b..0f0d37a1ca 100644 --- a/docs/src/userguide/merge.svg +++ b/docs/src/userguide/merge.svg @@ -9,11 +9,11 @@ xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" - width="750" - height="250" + width="734.55884" + height="280.94952" id="svg2834" version="1.1" - inkscape:version="0.47 r22583" + inkscape:version="0.92.4 (5da689c313, 2019-01-14)" sodipodi:docname="merge.svg" inkscape:export-xdpi="90" inkscape:export-ydpi="90"> @@ -38,7 +38,8 @@ id="path3666" style="font-size:12px;fill-rule:evenodd;stroke-width:0.625;stroke-linejoin:round" d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z" - transform="matrix(-1.1,0,0,-1.1,-1.1,0)" /> + transform="matrix(-1.1,0,0,-1.1,-1.1,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="scale(-0.8)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,10,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" 
+ transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + borderlayer="true" + fit-margin-top="20" + fit-margin-left="20" + fit-margin-bottom="20" + fit-margin-right="20"> + snapvisiblegridlinesonly="true" + originx="-4.2538044" + originy="11.570523" /> @@ -394,7 +407,7 @@ image/svg+xml - + @@ -402,10 +415,10 @@ id="layer1" inkscape:label="Layer 1" inkscape:groupmode="layer" - transform="translate(-22.109375,-210.54913)"> + transform="translate(-26.363179,-191.17014)"> y + style="font-size:17.92656898px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">y @@ -422,19 +435,22 @@ inkscape:path-effect="#path-effect3640" id="path3638" d="M 60,420 320,260" - style="fill:none;stroke:#000000;stroke-width:1.42801106px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" /> + style="fill:none;stroke:#000000;stroke-width:1.42801106px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 V 50" + style="fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 H 480" + style="fill:none;stroke:#000000;stroke-width:1.16397536px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:1.42801106px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 V 50" + style="fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + d="M 60,420 H 480" + style="fill:none;stroke:#000000;stroke-width:1.16397536px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 67.5722,390.18197 H 61.970147" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 67.5722,345.36554 c -5.602053,0 -5.602053,0 -5.602053,0" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 67.5722,300.54913 c -5.602053,0 -5.602053,0 -5.602053,0" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 445.15053,390.18197 h -5.60205" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 445.15053,345.36554 h -5.60205" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 445.15053,300.54913 h -5.60205" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 88.077823,390.18196 202.02147,322.95733 H 362.5784 l -88.04735,67.22463 z" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 203.20315,211.22392 c 0,134.44926 0,134.44926 0,134.44926" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 88.394922,345.36553 202.33857,278.1409 H 362.8955 l -88.04735,67.22463 z" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 88.077823,300.54913 202.02147,233.3245 H 362.5784 l -88.04735,67.22463 z" + inkscape:connector-curvature="0" /> 0 + style="font-size:17.92656898px;line-height:1.25">0 + inkscape:original-d="m 67.5722,434.99838 c -5.602053,0 -5.602053,0 -5.602053,0 v 0" + 
inkscape:connector-curvature="0" /> 1 + style="font-size:17.92656898px;line-height:1.25">1 2 + style="font-size:17.92656898px;line-height:1.25">2 3 + style="font-size:17.92656898px;line-height:1.25">3 0 + style="font-size:17.92656898px;line-height:1.25">0 1 + style="font-size:17.92656898px;line-height:1.25">1 2 + style="font-size:17.92656898px;line-height:1.25">2 3 + style="font-size:17.92656898px;line-height:1.25">3 + inkscape:original-d="m 89.980408,278.14092 c 0,134.44926 0,134.44926 0,134.44926" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 274.84815,278.14092 c 0,134.44926 0,134.44926 0,134.44926" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 445.15053,434.99838 h -5.60205" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 465.65616,390.18197 113.94362,-67.22463 h 160.55695 l -88.04736,67.22463 H 465.65615 Z" + inkscape:connector-curvature="0" /> + inkscape:original-d="M 465.65616,300.54913 579.59978,233.3245 h 160.55695 l -88.04736,67.22463 H 465.65615 Z" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 465.65617,300.54913 v 89.63284 H 652.10938 V 300.54913 H 465.65616 Z" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 652.10939,390.18197 88.04735,-67.22463 V 233.3245 l -88.04736,67.22463 v 89.63284 z" + inkscape:connector-curvature="0" /> + inkscape:original-d="m 363.42185,211.17014 c 0,134.44926 0,134.44926 0,134.44926" + inkscape:connector-curvature="0" /> x + style="font-size:17.92656898px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">x z + style="font-size:17.92656898px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">z x + style="font-size:17.92656898px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">x z + style="font-size:17.92656898px;line-height:1.25;font-family:'URW Palladio L';-inkscape-font-specification:'URW Palladio L'">z + inkscape:original-d="m 376.86678,323.26496 v 11.20411 h 22.4082 v 5.60205 l 16.80616,-11.2041 -16.80616,-11.20411 v 5.60205 z" + inkscape:connector-curvature="0" /> diff --git a/docs/src/userguide/merge_and_concat.png b/docs/src/userguide/merge_and_concat.png deleted file mode 100644 index 48238287b4..0000000000 Binary files a/docs/src/userguide/merge_and_concat.png and /dev/null differ diff --git a/docs/src/userguide/merge_and_concat.rst b/docs/src/userguide/merge_and_concat.rst index b521d49a59..d754e08cc1 100644 --- a/docs/src/userguide/merge_and_concat.rst +++ b/docs/src/userguide/merge_and_concat.rst @@ -16,7 +16,7 @@ issues from occurring. Both ``merge`` and ``concatenate`` take multiple cubes as input and result in fewer cubes as output. The following diagram illustrates the two processes: -.. image:: merge_and_concat.png +.. image:: merge_and_concat.svg :alt: Pictographic of merge and concatenation. :align: center @@ -128,7 +128,7 @@ make a new ``z`` dimension coordinate: The following diagram illustrates what has taken place in this example: -.. image:: merge.png +.. image:: merge.svg :alt: Pictographic of merge. :align: center @@ -294,7 +294,7 @@ cubes to form a new cube with an extended ``t`` coordinate: The following diagram illustrates what has taken place in this example: -.. image:: concat.png +.. image:: concat.svg :alt: Pictographic of concatenate. 
:align: center diff --git a/docs/src/userguide/multi_array.png b/docs/src/userguide/multi_array.png deleted file mode 100644 index 54a2688d2a..0000000000 Binary files a/docs/src/userguide/multi_array.png and /dev/null differ diff --git a/docs/src/userguide/multi_array.svg b/docs/src/userguide/multi_array.svg index d28f6d71d6..38ba58744f 100644 --- a/docs/src/userguide/multi_array.svg +++ b/docs/src/userguide/multi_array.svg @@ -13,244 +13,250 @@ height="300" id="svg2" version="1.1" - inkscape:version="0.47 r22583" + inkscape:version="0.92.4 (5da689c313, 2019-01-14)" sodipodi:docname="multi_array.svg" inkscape:export-xdpi="90" - inkscape:export-ydpi="90"> + inkscape:export-ydpi="90" + viewBox="0 0 470 320"> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,4.8,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,-4.8,0)" + inkscape:connector-curvature="0" /> + style="font-size:12px;fill-rule:evenodd;stroke-width:0.625;stroke-linejoin:round" + d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z" + transform="matrix(1.1,0,0,1.1,1.1,0)" + inkscape:connector-curvature="0" /> + style="overflow:visible"> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + style="overflow:visible"> + style="font-size:12px;fill-rule:evenodd;stroke-width:0.625;stroke-linejoin:round" + d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z" + transform="matrix(-1.1,0,0,-1.1,-1.1,0)" + inkscape:connector-curvature="0" /> + style="color-interpolation-filters:sRGB"> @@ -322,17 +328,24 @@ + + transform="translate(-22.275317,-306.43968)" + style="display:inline"> + style="opacity:1;fill:none;stroke:#000000;stroke-width:1.23286104;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + d="M 227.83333,331.9969 H 460.74875" + id="path2967" + inkscape:connector-curvature="0" /> 2 + style="font-size:20px;line-height:1.25;font-family:Arial;-inkscape-font-specification:Arial">2 1 + style="font-size:20px;line-height:1.25;font-family:Arial;-inkscape-font-specification:Arial">1 0 + style="font-size:20px;line-height:1.25;font-family:Arial;-inkscape-font-specification:Arial">0 + d="m 35.115947,477.76237 v 104.4593" + style="fill:none;stroke:#000000;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-start:url(#EmptyTriangleInL)" + inkscape:connector-curvature="0" /> + id="path2965-9-1" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:1.90165818;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#EmptyTriangleOutL)" + d="M 34.20344,581.46304 H 134.30678" + id="path2967-5" + inkscape:connector-curvature="0" /> + d="m 276.64236,373.53613 v 187.701" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" 
/> + d="M 218.10752,373.53612 V 561.23713" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + d="M 339.27837,373.53612 V 561.23713" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + d="M 164.28582,434.95011 H 397.20125" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + d="M 164.28582,500.94163 H 397.20124" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + d="M 429.87688,350.79329 V 538.4943" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + d="M 197.00473,350.14458 H 429.92015" + style="fill:#b3b3b3;stroke:#999999;stroke-width:2.03630161;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" + inkscape:connector-curvature="0" /> + id="path2965-9" + inkscape:connector-curvature="0" /> + id="path2965-9-3" + inkscape:connector-curvature="0" /> + id="path2965-9-3-1" + inkscape:connector-curvature="0" /> + style="opacity:1;fill:none;stroke:#000000;stroke-width:1.40908241;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + d="M 460.83518,331.99456 V 519.69557" + id="path3940" + inkscape:connector-curvature="0" /> diff --git a/docs/src/userguide/multi_array_to_cube.png b/docs/src/userguide/multi_array_to_cube.png deleted file mode 100644 index 1144ee6715..0000000000 Binary files a/docs/src/userguide/multi_array_to_cube.png and /dev/null differ diff --git a/docs/src/userguide/multi_array_to_cube.svg b/docs/src/userguide/multi_array_to_cube.svg index a2fc2f5e26..8b0cc529dd 100644 --- a/docs/src/userguide/multi_array_to_cube.svg +++ b/docs/src/userguide/multi_array_to_cube.svg @@ -9,11 +9,11 @@ xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" 
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" - width="600" - height="400" + width="588.37256" + height="379.43076" id="svg2" version="1.1" - inkscape:version="0.47 r22583" + inkscape:version="0.92.4 (5da689c313, 2019-01-14)" sodipodi:docname="multi_array_to_cube.svg" inkscape:export-xdpi="90" inkscape:export-ydpi="90"> @@ -28,9 +28,10 @@ style="overflow:visible"> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,4.8,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,-4.8,0)" + inkscape:connector-curvature="0" /> + transform="matrix(1.1,0,0,1.1,1.1,0)" + inkscape:connector-curvature="0" /> + d="M 0,0 5,-5 -12.5,0 5,5 Z" + style="fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,-10,0)" + inkscape:connector-curvature="0" /> + transform="matrix(-1.1,0,0,-1.1,-1.1,0)" + inkscape:connector-curvature="0" /> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + style="color-interpolation-filters:sRGB"> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,-4.8,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,-4.8,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(0.8,0,0,0.8,-4.8,0)" + inkscape:connector-curvature="0" /> + d="M 5.77,0 -2.88,5 V -5 Z" + style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;marker-start:none" + transform="matrix(-0.8,0,0,-0.8,4.8,0)" + inkscape:connector-curvature="0" /> + inkscape:guide-bbox="true" + borderlayer="false" + inkscape:pagecheckerboard="true" + showborder="true" + fit-margin-top="10" + fit-margin-left="10" + fit-margin-bottom="10" + fit-margin-right="10" /> @@ -1012,86 +1028,108 @@ + + transform="translate(-24.340681,-213.92624)"> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + d="M 279.33268,257.02949 H 512.2481" + id="path2967" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="m 334.56237,297.14191 v 187.701" + id="path3940-3" + inkscape:connector-curvature="0" /> + 
style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="M 276.02753,297.1419 V 484.84291" + id="path3940-2" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="M 397.19838,297.1419 V 484.84291" + id="path3940-38" + inkscape:connector-curvature="0" /> + id="path2965-9-3-8" + inkscape:connector-curvature="0" /> + id="path2965-9-3-7" + inkscape:connector-curvature="0" /> + id="path2965-9-3-9" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="M 222.20583,358.55589 H 455.12126" + id="path2967-1" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="M 222.20583,424.54741 H 455.12125" + id="path2967-4" + inkscape:connector-curvature="0" /> + id="path2965-9-3-0" + inkscape:connector-curvature="0" /> + id="path2965-9-3-91" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="M 487.79689,274.39907 V 462.10008" + id="path3940-6" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330)" + d="M 254.92474,273.75036 H 487.84016" + id="path2967-8" + inkscape:connector-curvature="0" /> + id="path2965-9" + inkscape:connector-curvature="0" /> + id="path2965-9-3" + inkscape:connector-curvature="0" /> + id="path2965-9-3-1" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + d="M 512.33453,257.02715 V 444.72816" + id="path3940" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:4.65429401;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330-5-0)" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:4.86589146;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330-5-0)" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:4.77046061;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330-5-0)" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + d="M 180.66394,434.79892 H 414.8872 m 0.13354,0.19234 23.6025,-16.61686" + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + id="path2967-45-1" + 
inkscape:connector-curvature="0" /> + d="M 205.47014,418.47977 H 438.38556" + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:4.46560764;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330-2)" + inkscape:connector-curvature="0" /> + style="fill:#b3b3b3;stroke:#999999;stroke-width:4.46560764;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330-2)" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + d="M 607.67297,166.52524 V 354.22625" + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + d="M 632.44481,148.8327 V 336.23717" + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> 2 + style="font-size:20px;line-height:1.25;font-family:Arial;-inkscape-font-specification:Arial">2 + style="fill:none;stroke:#000000;stroke-width:0.72307718;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#EmptyTriangleOutL)" + d="m 218.65167,564.61456 h 36.19122" + id="path2967-5-8" + inkscape:connector-curvature="0" /> -180 90 + style="font-size:16px;line-height:1.25">90 0 + style="font-size:16px;line-height:1.25">0 90 + style="font-size:16px;line-height:1.25">90 Longitude (degrees) + style="font-size:16px;line-height:1.25">Longitude (degrees) Height (meters) + style="font-size:16px;line-height:1.25">Height (meters) + d="m 654.20331,319.24177 h 58.79787" + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + d="M 454.91905,335.96264 H 687.83447" + style="fill:#b3b3b3;stroke:#999999;stroke-width:8.0860281;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter4330-0)" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> + d="m 597.7085,358.87212 h 58.79787" + style="fill:none;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-mid:none" + inkscape:connector-curvature="0" /> Latitude (degrees) + 
style="font-size:16px;line-height:1.25">Latitude (degrees) @@ -1281,9 +1337,9 @@ id="text4028-7" y="378.68555" x="17.929327" - style="font-size:22.62892342px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;font-family:Bitstream Vera Sans" + style="font-style:normal;font-weight:normal;line-height:0%;font-family:'Bitstream Vera Sans';fill:#000000;fill-opacity:1;stroke:none" xml:space="preserve"> + style="fill:none;stroke:#000000;stroke-width:0.99685019;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#EmptyTriangleOutL)" + inkscape:connector-curvature="0" /> -45 + style="font-size:16px;line-height:1.25">-45 45 + style="font-size:16px;line-height:1.25">45 + d="m 588.51962,415.76898 v 48.81029" + style="fill:none;stroke:#000000;stroke-width:0.93453234;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-start:url(#EmptyTriangleInL)" + inkscape:connector-curvature="0" /> 0 Air temperature (kelvin) + style="font-size:20px;line-height:1.25">Air temperature (kelvin) 2 + style="font-size:16px;line-height:1.25">2 10 + style="font-size:16px;line-height:1.25">10 25 + style="font-size:16px;line-height:1.25">25 diff --git a/docs/src/userguide/plotting_examples/1d_with_legend.py b/docs/src/userguide/plotting_examples/1d_with_legend.py index 626335af45..6b29fc9e76 100644 --- a/docs/src/userguide/plotting_examples/1d_with_legend.py +++ b/docs/src/userguide/plotting_examples/1d_with_legend.py @@ -31,7 +31,7 @@ plt.grid(True) # Provide some axis labels -plt.ylabel("Temerature / kelvin") +plt.ylabel("Temperature / kelvin") plt.xlabel("Longitude / degrees") # And a sensible title diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst index 9d66a2f086..ef4de0c429 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/userguide/real_and_lazy_data.rst @@ -6,6 +6,7 @@ import dask.array as da import iris + from iris.cube import CubeList import numpy as np @@ -188,17 +189,17 @@ coordinates' lazy points and bounds: .. doctest:: - >>> cube = iris.load_cube(iris.sample_data_path('hybrid_height.nc'), 'air_potential_temperature') + >>> cube = iris.load_cube(iris.sample_data_path('orca2_votemper.nc'),'votemper') - >>> dim_coord = cube.coord('model_level_number') + >>> dim_coord = cube.coord('depth') >>> print(dim_coord.has_lazy_points()) False >>> print(dim_coord.has_bounds()) - False + True >>> print(dim_coord.has_lazy_bounds()) False - >>> aux_coord = cube.coord('sigma') + >>> aux_coord = cube.coord('longitude') >>> print(aux_coord.has_lazy_points()) True >>> print(aux_coord.has_bounds()) @@ -213,7 +214,9 @@ coordinates' lazy points and bounds: >>> print(aux_coord.has_lazy_bounds()) True - >>> derived_coord = cube.coord('altitude') + # Fetch a derived coordinate, from a different file: These can also have lazy data. + >>> cube2 = iris.load_cube(iris.sample_data_path('hybrid_height.nc'), 'air_potential_temperature') + >>> derived_coord = cube2.coord('altitude') >>> print(derived_coord.has_lazy_points()) True >>> print(derived_coord.has_bounds()) @@ -221,17 +224,51 @@ coordinates' lazy points and bounds: >>> print(derived_coord.has_lazy_bounds()) True -.. note:: - Printing a lazy :class:`~iris.coords.AuxCoord` will realise its points and bounds arrays! 
- Dask Processing Options ----------------------- -Iris uses dask to provide lazy data arrays for both Iris cubes and coordinates, -and for computing deferred operations on lazy arrays. +Iris uses `Dask `_ to provide lazy data arrays for +both Iris cubes and coordinates, and for computing deferred operations on lazy arrays. Dask provides processing options to control how deferred operations on lazy arrays are computed. This is provided via the ``dask.set_options`` interface. See the `dask documentation `_ for more information on setting dask processing options. + + +.. _delayed_netcdf_save: + +Delayed NetCDF Saving +--------------------- + +When saving data to NetCDF files, it is possible to *delay* writing lazy content to the +output file, to be performed by `Dask `_ later, +thus enabling parallel save operations. + +This works in the following way : + 1. an :func:`iris.save` call is made, with a NetCDF file output and the additional + keyword ``compute=False``. + This is currently *only* available when saving to NetCDF, so it is documented in + the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`. + + 2. the call creates the output file, but does not fill in variables' data, where + the data is a lazy array in the Iris object. Instead, these variables are + initially created "empty". + + 3. the :meth:`~iris.save` call returns a ``result`` which is a + :class:`~dask.delayed.Delayed` object. + + 4. the save can be completed later by calling ``result.compute()``, or by passing it + to the :func:`dask.compute` call. + +The benefit of this, is that costly data transfer operations can be performed in +parallel with writes to other data files. Also, where array contents are calculated +from shared lazy input data, these can be computed in parallel efficiently by Dask +(i.e. without re-fetching), similar to what :meth:`iris.cube.CubeList.realise_data` +can do. + +.. note:: + This feature does **not** enable parallel writes to the *same* NetCDF output file. + That can only be done on certain operating systems, with a specially configured + build of the NetCDF C library, and is not supported by Iris at present. diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst index 7d983448b9..0c99638bbd 100644 --- a/docs/src/voted_issues.rst +++ b/docs/src/voted_issues.rst @@ -20,7 +20,7 @@ the below table. .. raw:: html - +
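As an illustration of the delayed NetCDF saving workflow described above, here is a minimal sketch (an editorial addition, not part of the changeset: the file names are hypothetical, and it assumes a cube holding lazy data and an Iris version in which :func:`iris.fileformats.netcdf.save` accepts ``compute=False``, as documented in the section above)::

    import iris

    # Load a cube; its data stays lazy until it is actually needed.
    cube = iris.load_cube("input.nc")

    # Create the output file now, but leave the lazy variable data unwritten.
    # The returned object is a dask ``Delayed``.
    delayed_save = iris.save(cube, "output.nc", compute=False)

    # ... perform other work, possibly building further delayed saves ...

    # Complete the save later, either directly or by passing the object to
    # :func:`dask.compute` together with other delayed saves so that shared
    # lazy inputs are only computed once.
    delayed_save.compute()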
@@ -42,7 +42,8 @@ the below table. "ajax": 'https://raw.githubusercontent.com/scitools/voted_issues/main/voted-issues.json', "lengthMenu": [10, 25, 50, 100], "pageLength": 10, - "order": [[ 0, "desc" ]] + "order": [[ 0, "desc" ]], + "bJQueryUI": true, } ); } ); diff --git a/docs/src/whatsnew/1.7.rst b/docs/src/whatsnew/1.7.rst index 44ebe9ec60..1d7c7c3f60 100644 --- a/docs/src/whatsnew/1.7.rst +++ b/docs/src/whatsnew/1.7.rst @@ -21,14 +21,14 @@ Features transparent; for example, before the introduction of biggus, MemoryErrors were likely for very large datasets:: - >>> result = extremely_large_cube.collapsed('time', iris.analyis.MEAN) + >>> result = extremely_large_cube.collapsed('time', iris.analysis.MEAN) MemoryError Now, for supported operations, the evaluation is lazy (i.e. it doesn't take place until the actual data is subsequently requested) and can handle data larger than available system memory:: - >>> result = extremely_large_cube.collapsed('time', iris.analyis.MEAN) + >>> result = extremely_large_cube.collapsed('time', iris.analysis.MEAN) >>> print(type(result)) diff --git a/docs/src/whatsnew/2.0.rst b/docs/src/whatsnew/2.0.rst index 400a395e90..4ef50a4101 100644 --- a/docs/src/whatsnew/2.0.rst +++ b/docs/src/whatsnew/2.0.rst @@ -36,7 +36,7 @@ Features * The *new* in-place arithmetic operators :data:`__iadd__`, :data:`__idiv__`, :data:`__imul__`, :data:`__isub__`, and :data:`__itruediv__` have been added to support :class:`~iris.cube.Cube` operations :data:`+=`, - :data:`/=`, :data:`*=`, and :data:`-=`. Note that, for **divison** + :data:`/=`, :data:`*=`, and :data:`-=`. Note that, for **division** *__future__.division* is always in effect. * Changes to the :class:`iris.coords.Coord`: diff --git a/docs/src/whatsnew/2.1.rst b/docs/src/whatsnew/2.1.rst index 18c562d3da..3613bc0c23 100644 --- a/docs/src/whatsnew/2.1.rst +++ b/docs/src/whatsnew/2.1.rst @@ -1,3 +1,5 @@ +.. include:: ../common_links.inc + v2.1 (06 Jun 2018) ****************** @@ -67,7 +69,7 @@ Incompatible Changes as an alternative. * This release of Iris contains a number of updated metadata translations. - See this + See this `changelist `_ for further information. @@ -84,7 +86,7 @@ Internal calendar. * Iris updated its time-handling functionality from the - `netcdf4-python `_ + `netcdf4-python`__ ``netcdftime`` implementation to the standalone module `cftime `_. cftime is entirely compatible with netcdftime, but some issues may @@ -92,6 +94,8 @@ Internal In this situation, simply replacing ``netcdftime.datetime`` with ``cftime.datetime`` should be sufficient. +__ `netCDF4`_ + * Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy. - Full requirements can be seen in the `requirements `_ + Full requirements can be seen in the `requirements`_ directory of the Iris' the source. diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 223ef60011..4107ae5d2b 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -6,10 +6,9 @@ v3.0 (25 Jan 2021) This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: :opticon:`report` v3.0.0 Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.0.0 Release Highlights + :color: primary + :icon: info :animate: fade-in :open: @@ -42,10 +41,9 @@ This document explains the changes made to Iris for this release v3.0.1 (27 Jan 2021) ==================== -.. 
dropdown:: :opticon:`alert` v3.0.1 Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.0.1 Patches + :color: secondary + :icon: alert :animate: fade-in The patches included in this release include: @@ -61,10 +59,9 @@ v3.0.1 (27 Jan 2021) v3.0.2 (27 May 2021) ==================== -.. dropdown:: :opticon:`alert` v3.0.2 Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.0.2 Patches + :color: secondary + :icon: alert :animate: fade-in The patches included in this release include: @@ -115,10 +112,9 @@ v3.0.2 (27 May 2021) v3.0.3 (07 July 2021) ===================== -.. dropdown:: :opticon:`alert` v3.0.3 Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.0.3 Patches + :color: secondary + :icon: alert :animate: fade-in The patches included in this release include: @@ -133,10 +129,9 @@ v3.0.3 (07 July 2021) v3.0.4 (22 July 2021) ===================== -.. dropdown:: :opticon:`alert` v3.0.4 Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.0.4 Patches + :color: secondary + :icon: alert :animate: fade-in The patches included in this release include: @@ -147,7 +142,7 @@ v3.0.4 (22 July 2021) Firstly, ancillary-variables or cell-measures with long names can now widen the cube "dimensions map" to fit, whereas previously printing these cases caused an Exception. Secondly, cube units are now always printed, whereas previously they were missed out any time that the - "dimensions map" was widened to accomodate long coordinate names. + "dimensions map" was widened to accommodate long coordinate names. (:pull:`4233`)(:pull:`4238`) 💼 **Internal** diff --git a/docs/src/whatsnew/3.1.rst b/docs/src/whatsnew/3.1.rst index 1f076572bc..744543f514 100644 --- a/docs/src/whatsnew/3.1.rst +++ b/docs/src/whatsnew/3.1.rst @@ -7,10 +7,9 @@ This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: :opticon:`report` v3.1.0 Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.1.0 Release Highlights + :color: primary + :icon: info :animate: fade-in :open: diff --git a/docs/src/whatsnew/3.2.rst b/docs/src/whatsnew/3.2.rst index 723f26345e..87a85f9061 100644 --- a/docs/src/whatsnew/3.2.rst +++ b/docs/src/whatsnew/3.2.rst @@ -6,11 +6,9 @@ v3.2 (15 Feb 2022) This document explains the changes made to Iris for this release (:doc:`View all changes `.) - -.. dropdown:: :opticon:`report` v3.2.0 Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.2.0 Release Highlights + :color: primary + :icon: info :animate: fade-in :open: @@ -28,10 +26,9 @@ This document explains the changes made to Iris for this release v3.2.1 (11 Mar 2022) ==================== -.. dropdown:: :opticon:`alert` v3.2.1 Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.2.1 Patches + :color: secondary + :icon: alert :animate: fade-in 📢 **Welcome** to `@dennissergeev`_, who made his first contribution to Iris. Nice work! @@ -170,7 +167,7 @@ v3.2.1 (11 Mar 2022) as well as some long-standing bugs with vertical coordinates and number formats. (:pull:`4411`) -#. 
`@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to alway return ``None`` if +#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to always return ``None`` if no value match is found. (:pull:`4417`) #. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning diff --git a/docs/src/whatsnew/3.3.rst b/docs/src/whatsnew/3.3.rst index c2e47f298a..4ab5a2e973 100644 --- a/docs/src/whatsnew/3.3.rst +++ b/docs/src/whatsnew/3.3.rst @@ -6,11 +6,9 @@ v3.3 (1 Sep 2022) This document explains the changes made to Iris for this release (:doc:`View all changes `.) - -.. dropdown:: :opticon:`report` v3.3.0 Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: |iris_version| v3.3.0 Release Highlights + :color: primary + :icon: info :animate: fade-in :open: @@ -34,31 +32,30 @@ This document explains the changes made to Iris for this release v3.3.1 (29 Sep 2022) ==================== -.. dropdown:: :opticon:`alert` v3.3.1 Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: v3.3.1 Patches + :color: secondary + :icon: alert :animate: fade-in The patches in this release of Iris include: - #. `@pp-mo`_ fixed the Jupyter notebook display of :class:`~iris.cube.CubeList`. - (:issue:`4973`, :pull:`4976`) + #. `@pp-mo`_ fixed the Jupyter notebook display of :class:`~iris.cube.CubeList`. + (:issue:`4973`, :pull:`4976`) - #. `@pp-mo`_ fixed a bug in NAME loaders where data with no associated statistic would - load as a cube with invalid cell-methods, which cannot be printed or saved to netcdf. - (:issue:`3288`, :pull:`4933`) + #. `@pp-mo`_ fixed a bug in NAME loaders where data with no associated statistic would + load as a cube with invalid cell-methods, which cannot be printed or saved to netcdf. + (:issue:`3288`, :pull:`4933`) - #. `@pp-mo`_ ensured that :data:`iris.cube.Cube.cell_methods` must always be an iterable - of :class:`iris.coords.CellMethod` objects (:pull:`4933`). + #. `@pp-mo`_ ensured that :data:`iris.cube.Cube.cell_methods` must always be an iterable + of :class:`iris.coords.CellMethod` objects (:pull:`4933`). - #. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's - change to default Transverse Mercator projection affects an Iris test. - See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more - details. (:pull:`4992`) + #. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's + change to default Transverse Mercator projection affects an Iris test. + See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more + details. (:pull:`4992`) - #. `@trexfeathers`_ introduced the ``netcdf4!=1.6.1`` pin to avoid a - problem with segfaults. (:pull:`4992`) + #. `@trexfeathers`_ introduced the ``netcdf4!=1.6.1`` pin to avoid a + problem with segfaults. (:pull:`4992`) 📢 Announcements diff --git a/docs/src/whatsnew/3.4.rst b/docs/src/whatsnew/3.4.rst index 1ad676c049..e8d4f0fd2b 100644 --- a/docs/src/whatsnew/3.4.rst +++ b/docs/src/whatsnew/3.4.rst @@ -7,10 +7,9 @@ This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: :opticon:`report` v3.4.0 Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. 
dropdown:: v3.4.0 Release Highlights + :color: primary + :icon: info :animate: fade-in :open: @@ -26,15 +25,29 @@ This document explains the changes made to Iris for this release * We have **begun refactoring Iris' regridding**, which has already improved performance and functionality, with more potential in future! * We have made several other significant `🚀 Performance Enhancements`_. - * Please note that **Iris cannot currently work with the latest NetCDF4 - releases**. The pin is set to ``` if you have any issues or feature requests for improving Iris. Enjoy! + +v3.4.1 (21 Feb 2023) +==================== + +.. dropdown:: v3.4.1 Patches + :color: secondary + :icon: alert + :animate: fade-in + + The patches in this release of Iris include: + + #. `@trexfeathers`_ and `@pp-mo`_ made Iris' use of the `netCDF4`_ library + thread-safe. (:pull:`5095`) + + #. `@trexfeathers`_ and `@pp-mo`_ removed the netCDF4 pin mentioned in + `🔗 Dependencies`_ point 3. (:pull:`5095`) + + 📢 Announcements ================ diff --git a/docs/src/whatsnew/3.5.rst b/docs/src/whatsnew/3.5.rst new file mode 100644 index 0000000000..c6699ee842 --- /dev/null +++ b/docs/src/whatsnew/3.5.rst @@ -0,0 +1,214 @@ +.. include:: ../common_links.inc + +v3.5 (27 Apr 2023) +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + +.. dropdown:: v3.5 Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * We added support for plugins. + * We allowed the usage of Iris objects as weights + for cube aggregations. + * We made Iris' use of the `netCDF4`_ library + thread-safe. + * We improved performance by changing the netCDF loader to + fetch data immediately from small netCDF. + variables, instead of creating a dask array. + * We added notes within docstrings clarifying whether operations + maintain lazy data or not. + * We're so proud to fully support `@ed-hawkins`_ and `#ShowYourStripes`_ ❤️ + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. Congratulations to `@ESadek-MO`_ who has become a core developer for Iris! 🎉 +#. Welcome and congratulations to `@HGWright`_, `@scottrobinson02`_ and + `@agriyakhetarpal`_ who made their first contributions to Iris! 🎉 + + +✨ Features +=========== + +#. `@bsherratt`_ added support for plugins - see the corresponding + :ref:`documentation page` for further information. + (:pull:`5144`) + +#. `@rcomer`_ enabled lazy evaluation of :obj:`~iris.analysis.RMS` calculations + with weights. (:pull:`5017`) + +#. `@schlunma`_ allowed the usage of cubes, coordinates, cell measures, or + ancillary variables as weights for cube aggregations + (:meth:`iris.cube.Cube.collapsed`, :meth:`iris.cube.Cube.aggregated_by`, and + :meth:`iris.cube.Cube.rolling_window`). This automatically adapts cube units + if necessary. (:pull:`5084`) + +#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) added :func:`iris.plot.hist` + and :func:`iris.quickplot.hist`. (:pull:`5189`) + +#. `@tinyendian`_ edited :func:`~iris.analysis.cartography.rotate_winds` to + enable lazy computation of rotated wind vector components (:issue:`4934`, + :pull:`4972`) + +#. `@ESadek-MO`_ updated to the latest CF Standard Names Table v80 + (07 February 2023). (:pull:`5244`) + + +🐛 Bugs Fixed +============= + +#. 
`@schlunma`_ fixed :meth:`iris.cube.CubeList.concatenate` so that it + preserves derived coordinates. (:issue:`2478`, :pull:`5096`) + +#. `@trexfeathers`_ and `@pp-mo`_ made Iris' use of the `netCDF4`_ library + thread-safe. (:pull:`5095`) + +#. `@ESadek-MO`_ removed check and error raise for saving + cubes with masked :class:`iris.coords.CellMeasure`. + (:issue:`5147`, :pull:`5181`) + +#. `@scottrobinson02`_ fixed :class:`iris.util.new_axis` creating an anonymous new + dimension, when the scalar coord provided is already a dim coord. + (:issue:`4415`, :pull:`5194`) + +#. `@HGWright`_ and `@trexfeathers`_ (reviewer) changed the way + :class:`~iris.coords.CellMethod` are printed to be more CF compliant. + (:pull:`5224`) + +#. `@stephenworsley`_ fixed the way discontiguities were discovered for 2D coords. + Previously, the only bounds being compared were the bottom right bound in one + cell with the bottom left bound in the cell to its right, and the top left bound + in a cell with the bottom left bound in the cell above it. Now all bounds are + compared with all adjacent bounds from neighbouring cells. This affects + :meth:`~iris.coords.Coord.is_contiguous` and :func:`iris.util.find_discontiguities` + where additional discontiguities may be detected which previously were not. + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. `@pp-mo`_ changed the netCDF loader to fetch data immediately from small netCDF + variables, instead of creating a dask array: This saves both time and memory. + Note that some cubes, coordinates etc loaded from netCDF will now have real data + where previously it was lazy. (:pull:`5229`) + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. `@trexfeathers`_ introduced the ``libnetcdf <4.9`` pin. (:pull:`5242`) + + +📚 Documentation +================ + +#. `@rcomer`_ clarified instructions for updating gallery tests. (:pull:`5100`) +#. `@tkknight`_ unpinned ``pydata-sphinx-theme`` and set the default to use + the light version (not dark) while we make the docs dark mode friendly + (:pull:`5129`) + +#. `@jonseddon`_ updated the citation to a more recent version of Iris. (:pull:`5116`) + +#. `@rcomer`_ linked the :obj:`~iris.analysis.PERCENTILE` aggregator from the + :obj:`~iris.analysis.MEDIAN` docstring, noting that the former handles lazy + data. (:pull:`5128`) + +#. `@trexfeathers`_ updated the WSL link to Microsoft's latest documentation, + and removed an ECMWF link in the ``v1.0`` What's New that was failing the + linkcheck CI. (:pull:`5109`) + +#. `@trexfeathers`_ added a new top-level :doc:`/community/index` section, + as a one-stop place to find out about getting involved, and how we relate + to other projects. (:pull:`5025`) + +#. The **Iris community**, with help from the **Xarray community**, produced + the :doc:`/community/iris_xarray` page, highlighting the similarities and + differences between the two packages. (:pull:`5025`) + +#. `@bjlittle`_ added a new section to the `README.md`_ to show our support + for the outstanding work of `@ed-hawkins`_ et al for `#ShowYourStripes`_. + (:pull:`5141`) + +#. `@HGWright`_ fixed some typo's from Gitwash. (:pull:`5145`) + +#. `@Esadek-MO`_ added notes to function docstrings to + to clarify if the function preserves laziness or not. (:pull:`5137`) + +💼 Internal +=========== + +#. `@bouweandela`_ and `@trexfeathers`_ (reviewer) modernized and simplified + the code of ``iris.analysis._Groupby``. 
(:pull:`5015`) + +#. `@fnattino`_ changed the order of ``ncgen`` arguments in the command to + create NetCDF files for testing (caused errors on OS X). (:pull:`5105`) + +#. `@rcomer`_ removed some old infrastructure that printed test timings. + (:pull:`5101`) + +#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) added coverage testing. This + can be enabled by using the "--coverage" flag when running the tests with + nox i.e. ``nox --session tests -- --coverage``. (:pull:`4765`) + +#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) removed the ``--coding-tests`` + option from Iris' test runner. (:pull:`4765`) + +#. `@lbdreyer`_ removed the Iris TestRunner. Tests are now run via nox or + pytest. (:pull:`5205`) + +#. `@agriyakhetarpal`_ and `@trexfeathers`_ prevented the GitHub action for + publishing releases to PyPI from running in forks. + (:pull:`5220`, :pull:`5248`) + +#. `@trexfeathers`_ moved the benchmark runner conveniences from ``noxfile.py`` + to a dedicated ``benchmarks/bm_runner.py``. (:pull:`5215`) + +#. `@bjlittle`_ follow-up to :pull:`4972`, enforced ``dask>=2022.09.0`` minimum + pin for first use of `dask.array.ma.empty_like`_ and replaced `@tinyendian`_ + workaround. (:pull:`5225`) + +#. `@HGWright`_, `@bjlittle`_ and `@trexfeathers`_ removed the legacy pin for + ``numpy`` array printing and replaced the test results files to match default + ``numpy`` output. (:pull:`5235`) + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@fnattino: https://github.com/fnattino +.. _@ed-hawkins: https://github.com/ed-hawkins +.. _@scottrobinson02: https://github.com/scottrobinson02 +.. _@agriyakhetarpal: https://github.com/agriyakhetarpal +.. _@tinyendian: https://github.com/tinyendian + + +.. comment + Whatsnew resources in alphabetical order: + +.. _#ShowYourStripes: https://showyourstripes.info/s/globe/ +.. _README.md: https://github.com/SciTools/iris#----- +.. _dask.array.ma.empty_like: https://docs.dask.org/en/stable/generated/dask.array.ma.empty_like.html diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst new file mode 100644 index 0000000000..151c63ef51 --- /dev/null +++ b/docs/src/whatsnew/3.6.rst @@ -0,0 +1,183 @@ +.. include:: ../common_links.inc + +v3.6 (18 May 2023) +****************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: v3.6 Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + We're so excited about our recent support for **delayed saving of lazy data + to netCDF** (:pull:`5191`) that we're celebrating this important step change + in behavour with its very own dedicated release 🥳 + + By using ``iris.save(..., compute=False)`` you can now save to multiple NetCDF files + in parallel. See the new ``compute`` keyword in :func:`iris.fileformats.netcdf.save`. + This can share and re-use any common (lazy) result computations, and it makes much + better use of resources during any file-system waiting (i.e., it can use such periods + to progress the *other* saves). + + Usage example:: + + # Create output files with delayed data saving. + delayeds = [ + iris.save(cubes, filepath, compute=False) + for cubes, filepath in zip(output_cubesets, output_filepaths) + ] + # Complete saves in parallel. + dask.compute(*delayeds) + + This advance also includes **another substantial benefit**, because NetCDF saves can + now use a + `Dask.distributed scheduler `_. 
+ With `Distributed `_ you can parallelise the + saves across a whole cluster. Whereas previously, the NetCDF saving *only* worked with + a "threaded" scheduler, limiting it to a single CPU. + + We're so super keen for the community to leverage the benefit of this new + feature within Iris that we've brought this release forward several months. + As a result, this minor release of Iris is intentionally light in content. + However, there are some other goodies available for you to enjoy, such as: + + * Performing lazy arithmetic with an Iris :class:`~iris.cube.Cube` and a + :class:`dask.array.Array`, and + * Various improvements to our documentation resulting from adoption of + `sphinx-design`_ and `sphinx-apidoc`_. + + As always, get in touch with us on :issue:`GitHub`, particularly + if you have any feedback with regards to delayed saving, or have any issues + or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. `@bjlittle`_ added the community `Contributor Covenant`_ code of conduct. + (:pull:`5291`) + + +✨ Features +=========== + +#. `@pp-mo`_ and `@lbdreyer`_ supported delayed saving of lazy data, when writing to + the netCDF file format. See :ref:`delayed netCDF saves `. + Also with significant input from `@fnattino`_. + (:pull:`5191`) + +#. `@rcomer`_ tweaked binary operations so that dask arrays may safely be passed + to arithmetic operations and :func:`~iris.util.mask_cube`. (:pull:`4929`) + + +🐛 Bugs Fixed +============= + +#. `@rcomer`_ enabled automatic replacement of a Matplotlib + :class:`~matplotlib.axes.Axes` with a Cartopy + :class:`~cartopy.mpl.geoaxes.GeoAxes` when the ``Axes`` is on a + :class:`~matplotlib.figure.SubFigure`. (:issue:`5282`, :pull:`5288`) + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. `@rcomer`_ and `@bjlittle`_ (reviewer) added testing support for python + 3.11. (:pull:`5226`) + +#. `@rcomer`_ dropped support for python 3.8, in accordance with the NEP29_ + recommendations (:pull:`5226`) + +#. `@trexfeathers`_ introduced the ``libnetcdf !=4.9.1`` and ``numpy !=1.24.3`` + pins (:pull:`5274`) + + +📚 Documentation +================ + +#. `@tkknight`_ migrated to `sphinx-design`_ over the legacy `sphinx-panels`_. + (:pull:`5127`) + +#. `@tkknight`_ updated the ``make`` target for ``help`` and added + ``livehtml`` to auto generate the documentation when changes are detected + during development. (:pull:`5258`) + +#. `@tkknight`_ updated the :ref:`installing_from_source` instructions to use + ``pip``. (:pull:`5273`) + +#. `@tkknight`_ removed the legacy custom sphinx extensions that generate the + API documentation. Instead use a less complex approach via + `sphinx-apidoc`_. (:pull:`5264`) + +#. `@trexfeathers`_ re-wrote the :ref:`iris_development_releases` documentation + for clarity, and wrote a step-by-step + :doc:`/developers_guide/release_do_nothing` for the release process. + (:pull:`5134`) + +#. `@trexfeathers`_ and `@tkknight`_ added a dark-mode friendly logo. + (:pull:`5278`) + + +💼 Internal +=========== + +#. `@bjlittle`_ added the `codespell`_ `pre-commit`_ ``git-hook`` to automate + spell checking within the code-base. (:pull:`5186`) + +#. 
`@bjlittle`_ and `@trexfeathers`_ (reviewer) added a `check-manifest`_ + GitHub Action and `pre-commit`_ ``git-hook`` to automate verification + of assets bundled within a ``sdist`` and binary ``wheel`` of our + `scitools-iris`_ PyPI package. (:pull:`5259`) + +#. `@rcomer`_ removed a now redundant copying workaround from Resolve testing. + (:pull:`5267`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) migrated ``setup.cfg`` to + ``pyproject.toml``, as motivated by `PEP-0621`_. (:pull:`5262`) + +#. `@bjlittle`_ adopted `pypa/build`_ recommended best practice to build a + binary ``wheel`` from the ``sdist``. (:pull:`5266`) + +#. `@trexfeathers`_ enabled on-demand benchmarking of Pull Requests; see + :ref:`here `. (:pull:`5286`) + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@fnattino: https://github.com/fnattino + + +.. comment + Whatsnew resources in alphabetical order: + +.. _sphinx-panels: https://github.com/executablebooks/sphinx-panels +.. _sphinx-design: https://github.com/executablebooks/sphinx-design +.. _check-manifest: https://github.com/mgedmin/check-manifest +.. _PEP-0621: https://peps.python.org/pep-0621/ +.. _pypa/build: https://pypa-build.readthedocs.io/en/stable/ +.. _NEP29: https://numpy.org/neps/nep-0029-deprecation_policy.html +.. _Contributor Covenant: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ \ No newline at end of file diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index 005fac70c4..dce7458a13 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -12,6 +12,8 @@ What's New in Iris :hidden: latest.rst + 3.6.rst + 3.5.rst 3.4.rst 3.3.rst 3.2.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a38e426e6a..0e2896b7a1 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -7,16 +7,15 @@ This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: :opticon:`report` |iris_version| Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: |iris_version| Release Highlights + :color: primary + :icon: info :animate: fade-in :open: The highlights for this major/minor release of Iris include: - * We're so proud to fully support `@ed-hawkins`_ and `#ShowYourStripes`_ ❤️ + * N/A And finally, get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! @@ -25,16 +24,14 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. Congratulations to `@ESadek-MO`_ who has become a core developer for Iris! 🎉 -#. Welcome and congratulations to `@HGWright`_ for making his first contribution to Iris! 🎉 +#. N/A ✨ Features =========== -#. `@bsherratt`_ added support for plugins - see the corresponding - :ref:`documentation page` for further information. - (:pull:`5144`) +#. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles + lazy data. (:pull:`5307`) 🐛 Bugs Fixed @@ -52,8 +49,8 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. N/A - +#. `@rcomer`_ made :meth:`~iris.cube.Cube.aggregated_by` faster. (:pull:`4970`) +#. 
`@rsdavies`_ modified the CF compliant standard name for m01s00i023 :issue:`4566` 🔥 Deprecations =============== @@ -70,54 +67,26 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ -#. `@rcomer`_ clarified instructions for updating gallery tests. (:pull:`5100`) -#. `@tkknight`_ unpinned ``pydata-sphinx-theme`` and set the default to use - the light version (not dark) while we make the docs dark mode friendly - (:pull:`5129`) - -#. `@jonseddon`_ updated the citation to a more recent version of Iris. (:pull:`5116`) - -#. `@rcomer`_ linked the :obj:`~iris.analysis.PERCENTILE` aggregator from the - :obj:`~iris.analysis.MEDIAN` docstring, noting that the former handles lazy - data. (:pull:`5128`) - -#. `@trexfeathers`_ updated the WSL link to Microsoft's latest documentation, - and removed an ECMWF link in the ``v1.0`` What's New that was failing the - linkcheck CI. (:pull:`5109`) +#. `@tkknight`_ prepared the documentation for dark mode and enable the option + to use it. By default the theme will be based on the users system settings, + defaulting to ``light`` if no system setting is found. (:pull:`5299`) -#. `@trexfeathers`_ added a new top-level :doc:`/community/index` section, - as a one-stop place to find out about getting involved, and how we relate - to other projects. (:pull:`5025`) - -#. The **Iris community**, with help from the **Xarray community**, produced - the :doc:`/community/iris_xarray` page, highlighting the similarities and - differences between the two packages. (:pull:`5025`) - -#. `@bjlittle`_ added a new section to the `README.md`_ to show our support - for the outstanding work of `@ed-hawkins`_ et al for `#ShowYourStripes`_. - (:pull:`5141`) - -#. `@HGWright`_ fixed some typo's from Gitwash. (:pull:`5145`) 💼 Internal =========== -#. `@fnattino`_ changed the order of ``ncgen`` arguments in the command to - create NetCDF files for testing (caused errors on OS X). (:pull:`5105`) - -#. `@rcomer`_ removed some old infrastructure that printed test timings. - (:pull:`5101`) +#. `@pp-mo`_ supported loading and saving netcdf :class:`netCDF4.Dataset` compatible + objects in place of file-paths, as hooks for a forthcoming + `"Xarray bridge" `_ facility. + (:pull:`5214`) .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: +.. _@rsdavies: https://github.com/rsdavies + -.. _@fnattino: https://github.com/fnattino -.. _@ed-hawkins: https://github.com/ed-hawkins .. comment Whatsnew resources in alphabetical order: - -.. _#ShowYourStripes: https://showyourstripes.info/s/globe/ -.. _README.md: https://github.com/SciTools/iris#----- diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template index a0ce415a65..966a91e976 100644 --- a/docs/src/whatsnew/latest.rst.template +++ b/docs/src/whatsnew/latest.rst.template @@ -7,10 +7,9 @@ This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: :opticon:`report` |iris_version| Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: |iris_version| Release Highlights + :color: primary + :icon: info :animate: fade-in :open: @@ -22,29 +21,27 @@ This document explains the changes made to Iris for this release any issues or feature requests for improving Iris. Enjoy! 
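Relating to the ``latest.rst`` "Internal" entry above, which notes that Iris now supports loading and saving :class:`netCDF4.Dataset` compatible objects in place of file paths, here is a minimal sketch of the idea. It assumes the ordinary :func:`iris.load` entry point accepts such an object (the precise entry points and keywords may differ); the file name is purely illustrative::

    import iris
    import netCDF4

    # Open a dataset ourselves -- any netCDF4.Dataset compatible object,
    # e.g. one managed by another library, could stand in here.
    dataset = netCDF4.Dataset("input.nc", mode="r")

    # Pass the open Dataset object where a file path would normally go.
    cubes = iris.load(dataset)
    print(cubes)

    dataset.close()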
-NOTE: section below is a template for bugfix patches +NOTE: section BELOW is a template for bugfix patches ==================================================== - (Please remove this section when creating an initial 'latest.rst') + (Please remove this section when creating an initial 'latest.rst' -v3.X.X (DD MMM YYYY) -==================== +|iris_version| |build_date| +=========================== -.. dropdown:: :opticon:`alert` v3.X.X Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light +.. dropdown:: |iris_version| Patches + :color: primary + :icon: alert :animate: fade-in The patches in this release of Iris include: #. N/A -NOTE: section above is a template for bugfix patches +NOTE: section ABOVE is a template for bugfix patches ==================================================== (Please remove this section when creating an initial 'latest.rst') - 📢 Announcements ================ @@ -108,4 +105,3 @@ NOTE: section above is a template for bugfix patches .. comment Whatsnew resources in alphabetical order: - diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index 9c5fcd9cf0..3b145ae86e 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,11 +1,18 @@ - 79 - 2022-03-19T15:25:54Z + 81 + 2023-04-25T10:43:33Z Centre for Environmental Data Analysis support@ceda.ac.uk + + 1 + + + Acoustic area backscattering strength is 10 times the log10 of the ratio of the area backscattering coefficient to the reference value, 1 (m2 m-2). Area backscattering coefficient is the integral of the volume backscattering coefficient over a defined distance. Volume backscattering coefficient is the linear form of acoustic_volume_backscattering_strength_in_sea_water. For further details see MacLennan et. al (2002) doi:10.1006/jmsc.2001.1158. + + s @@ -13,6 +20,20 @@ The quantity with standard name acoustic_signal_roundtrip_travel_time_in_sea_water is the time taken for an acoustic signal to propagate from the emitting instrument to a reflecting surface and back again to the instrument. In the case of an instrument based on the sea floor and measuring the roundtrip time to the sea surface, the data are commonly used as a measure of ocean heat content. + + 1 + + + Target strength is 10 times the log10 of the ratio of backscattering cross-section to the reference value, 1 m2. Backscattering cross-section is a parameter computed from the intensity of the backscattered sound wave relative to the intensity of the incident sound wave. For further details see MacLennan et. al (2002) doi:10.1006/jmsc.2001.1158. + + + + 1 + + + Acoustic volume backscattering strength is 10 times the log10 of the ratio of the volume backscattering coefficient to the reference value, 1 m-1. Volume backscattering coefficient is the integral of the backscattering cross-section divided by the volume sampled. Backscattering cross-section is a parameter computed from the intensity of the backscattered sound wave relative to the intensity of the incident sound wave. The parameter is computed to provide a measurement that is proportional to biomass density per unit volume in the field of fisheries acoustics. For further details see MacLennan et. al (2002) doi:10.1006/jmsc.2001.1158. + + m @@ -27,6 +48,13 @@ The "aerodynamic_resistance" is the resistance to mixing through the boundary layer toward the surface by means of the dominant process, turbulent transport. Reference: Wesely, M. L., 1989, doi:10.1016/0004-6981(89)90153-4. 
+ + 1 + + + A variable with the standard_name of aerosol_type_in_atmosphere_layer_in_air contains either strings which indicate the type of the aerosol determined following a certain aerosol typing schema, or flags which can be translated to strings using flag_values and flag_meanings attributes. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). + + year @@ -237,6 +265,20 @@ Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. + + m + + + The altitude at top of atmosphere boundary layer is the elevation above sea level of the top of the (atmosphere) planetary boundary layer. The phrase "defined_by" provides the information of the tracer used for identifying the atmospheric boundary layer top. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "By ranging instrument" means that the backscattering is obtained through ranging techniques like lidar and radar. + + + + m + + + The altitude at top of atmosphere mixed layer is the elevation above sea level of the top of the (atmosphere) mixed layer or convective boundary layer. The phrase "defined_by" provides the information of the tracer used for identifying the atmospheric boundary layer top. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "By ranging instrument" means that the volume backscattering coefficient is obtained through ranging techniques like lidar and radar. + + m @@ -300,6 +342,13 @@ The "Angstrom exponent" appears in the formula relating aerosol optical thickness to the wavelength of incident radiation: T(lambda) = T(lambda0) * [lambda/lambda0] ** (-1 * alpha) where alpha is the Angstrom exponent, lambda is the wavelength of incident radiation, lambda0 is a reference wavelength, T(lambda) and T(lambda0) are the values of aerosol optical thickness at wavelengths lambda and lambda0, respectively. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". 
+ + 1 + + + The Angstrom exponent of volume backwards scattering is the Angstrom exponent related only to the aerosol backwards scattering component. It is alpha in the following equation relating volume backwards scattering (back) at the wavelength lambda to volume backwards scattering at a different wavelength lambda0: back(lambda) = back(lambda0) * [lambda/lambda0] ** (-1 * alpha). "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + K @@ -2568,6 +2617,13 @@ "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Burned area" means the area of burned vegetation. + + 1 + + + The Canadian Fire Weather Index (CFWI) is a numerical rating of potential frontal fire intensity from the Canadian Forest Fire Index System. It indicates fire intensity by combining the rate of spread with the amount of fuel being consumed and is also used for general public information about fire danger conditions. It is a function of wind speed, temperature, relative humidity, and precipitation. The calculation accounts for multiple layers of flammable material on the ground as well as fine fuels above the surface, combined with the expected rate of spread of fire. The index is open ended. + + 1 @@ -2932,6 +2988,13 @@ cloud_top refers to the top of the highest cloud. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. + + 1 + + + A variable with the standard_name of cloud_type contains either strings which indicate the cloud type, or flags which can be translated to strings using flag_values and flag_meanings attributes. + + m-3 @@ -2946,6 +3009,20 @@ "Compressive strength" is a measure of the capacity of a material to withstand compressive forces. If compressive forces are exerted on a material in excess of its compressive strength, fracturing will occur. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + + Pa + + + The maximum force applied as axial strain to an unconfined frozen soil sample before failure. + + + + Pa + + + The maximum force applied as axial strain to an unconfined soil sample before failure. + + 1 @@ -3086,6 +3163,13 @@ Covariance refers to the sample covariance rather than the population covariance. The quantity with standard name covariance_over_longitude_of_northward_wind_and_air_temperature is the covariance of the deviations of meridional air velocity and air temperature about their respective zonal mean values. The data variable must be accompanied by a vertical coordinate variable or scalar coordinate variable and is calculated on an isosurface of that vertical coordinate. 
"Northward" indicates a vector component which is positive when directed northward (negative southward). Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity"). Air temperature is the bulk temperature of the air, not the surface (skin) temperature. + + 1 + + + The phrase "ratio_of_X_to_Y" means X/Y. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Also known as specific gravity, where soil represents a dry soil sample. The density of a substance is its mass per unit volume. + + m @@ -3632,6 +3716,13 @@ Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. Spherical irradiance is the radiation incident on unit area of a hemispherical (or "2-pi") collector. It is sometimes called "scalar irradiance". The direction (up/downwelling) is specified. Radiation incident on a 4-pi collector has standard names of "omnidirectional spherical irradiance". A coordinate variable for radiation wavelength should be given the standard name radiation_wavelength. + + kg m-2 + + + The quantity with standard name drainage_amount_through_base_of_soil_model is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. “Drainage” is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. A vertical coordinate variable or scalar coordinate with standard name "depth" should be used to specify the depth to which the soil column extends. + + 1 @@ -3653,6 +3744,13 @@ "Content" indicates a quantity per unit area. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Dry energy is the sum of dry static energy and kinetic energy. Dry static energy is the sum of enthalpy and potential energy (itself the sum of gravitational and centripetal potential energy). Enthalpy can be written either as (1) CpT, where Cp is heat capacity at constant pressure, T is absolute temperature, or (2) U+pV, where U is internal energy, p is pressure and V is volume. + + kg m-3 + + + The density of the soil after oven drying until constant mass is reached. Volume is determined from the field sample volume. The density of a substance is its mass per unit volume. + + J m-2 @@ -3968,6 +4066,13 @@ The diameter of an aerosol particle as selected by its electrical mobility. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. 
To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". + + 1 + + + Isotopic enrichment of 13C, often called delta 13C, is a measure of the ratio of stable isotopes 13C:12C. It is a parameterisation of the 13C/12C isotopic ratio in the sample with respect to the isotopic ratio in a reference standard (in this case Vienna Pee Dee Belemnite). It is computed using the formula (((13C/12C)sample / (13C/12C)standard) - 1) * 1000. Particulate means suspended solids of all sizes. + + 1e-3 @@ -3975,6 +4080,13 @@ Isotopic enrichment of 14C, often called d14C or delta14C (lower case delta), is used to calculate the fossil fuel contribution to atmospheric carbon dioxide using isotopic ratios of carbon. It is a parameterisation of the 14C/12C isotopic ratio in the sample with respect to the isotopic ratio in a reference standard. It is computed using the formula (((14C/12C)sample / (14C/12C)standard) - 1) * 1000. The quantity called D14C, or Delta14C (upper case delta) is d14C corrected for isotopic fractionation using the 13C/12C ratio as follows: D14C = d14C - 2(dC13 + 25)(1+d14C/1000). If the sample is enriched in 14C relative to the standard, then the data value is positive. Reference: Stuiver, M. and H.A. Polach, 1977, Discussion reporting of 14C data, Radiocarbon, Volume 19, No. 3, 355-363, doi: 10.1017/S0033822200003672. The reference standard used in the calculation of delta14C should be specified by attaching a long_name attribute to the data variable. "C" means the element carbon and "14C" is the radioactive isotope "carbon-14", having six protons and eight neutrons and used in radiocarbon dating. + + 1 + + + Isotopic enrichment of 15N, often called delta 15N, is a measure of the ratio of stable isotopes 15N:14N. It is a parameterisation of the 15N/14N isotopic ratio in the sample with respect to the isotopic ratio in a reference standard (in this case atmospheric nitrogen). It is computed using the formula (((15N/14N)sample / (15N/14N)standard) - 1) * 1000. Particulate means suspended solids of all sizes. + + J m-2 @@ -4164,6 +4276,13 @@ A lightning flash is a compound event, usually consisting of several discharges. Frequency is the number of oscillations of a wave, or the number of occurrences of an event, per unit time. + + kg m-3 + + + The density of the soil in its naturally frozen condition. Also known as frozen bulk density. The density of a substance is its mass per unit volume. + + kg m-2 @@ -4381,6 +4500,20 @@ The ground_level_altitude is the geometric height of the upper boundary of the solid Earth above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. + + degree + + + The slope angle is the angle (in degrees) measured between the ground (earth) surface plane and a flat, horizontal surface. + + + + degree + + + Commonly known as aspect, it is the azimuth (in degrees) of a terrain slope, taken as the direction with the greatest downslope change in elevation on the ground (earth) surface. The direction is a bearing in the usual geographical sense, measured positive clockwise from due north. + + 1 @@ -7412,6 +7545,13 @@ "shortwave" means shortwave radiation. Radiance is the radiative flux in a particular direction, per unit of solid angle. If radiation is isotropic, the radiance is independent of direction, so the direction should not be specified. 
If the radiation is directionally dependent, a standard name of upwelling or downwelling radiance should be chosen instead. + + 1 + + + The Keetch Byram Drought Index (KBDI) is a numerical drought index ranging from 0 to 800 that estimates the cumulative moisture deficiency in soil. It is a cumulative index. It is a function of maximum temperature and precipitation over the previous 24 hours. + + J m-2 @@ -7804,6 +7944,20 @@ A quality flag that reports the result of the Location test, which checks that a location is within reasonable bounds. The linkage between the data variable and this variable is achieved using the ancillary_variables attribute. There are standard names for other specific quality tests which take the form of X_quality_flag. Quality information that does not match any of the specific quantities should be given the more general standard name of quality_flag. + + m-3 + + + The aerosol particle number size distribution is the number concentration of aerosol particles, normalised to the decadal logarithmic size interval the concentration applies to, as a function of particle diameter. A coordinate variable with the standard name of electrical_mobility_particle_diameter, aerodynamic_particle_diameter, or optical_particle_diameter should be specified to indicate that the property applies at specific particle sizes selected by the indicated method. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. "log10_X" means common logarithm (i.e. base 10) of X. "stp" means standard temperature (0 degC) and pressure (101325 Pa). + + + + m-3 + + + The aerosol particle number size distribution is the number concentration of aerosol particles, normalised to the decadal logarithmic size interval the concentration applies to, as a function of particle diameter. A coordinate variable with the standard name of electrical_mobility_particle_diameter, aerodynamic_particle_diameter, or optical_particle_diameter should be specified to indicate that the property applies at specific particle sizes selected by the indicated method. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. "log10_X" means common logarithm (i.e. base 10) of X. + + m-3 @@ -7811,6 +7965,13 @@ The cloud condensation nuclei number size distribution is the number concentration of aerosol particles, normalised to the decadal logarithmic size interval the concentration applies to, as a function of particle diameter, where the particle acts as condensation nucleus for liquid-phase clouds. A coordinate variable with the standard name of relative_humidity should be specified to indicate that the property refers to a specific supersaturation with respect to liquid water. A coordinate variable with the standard name of electrical_mobility_particle_diameter should be specified to indicate that the property applies at specific mobility particle sizes. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. The ability of a particle to act as a condensation nucleus is determined by its size, chemical composition, and morphology. "log10_X" means common logarithm (i.e. base 10) of X. 
"stp" means standard temperature (0 degC) and pressure (101325 Pa). + + m-3 + + + The cloud condensation nuclei number size distribution is the number concentration of aerosol particles, normalised to the decadal logarithmic size interval the concentration applies to, as a function of particle diameter, where the particle acts as condensation nucleus for liquid-phase clouds. A coordinate variable with the standard name of relative_humidity should be specified to indicate that the property refers to a specific supersaturation with respect to liquid water. A coordinate variable with the standard name of electrical_mobility_particle_diameter should be specified to indicate that the property applies at specific mobility particle sizes. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. The ability of a particle to act as a condensation nucleus is determined by its size, chemical composition, and morphology. "log10_X" means common logarithm (i.e. base 10) of X. + + degree_east @@ -8028,6 +8189,34 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19'-hexanoyloxyfucoxanthin is C48H68O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/HEXAXXXX/2/. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. The absorption equivalent black carbon mass concentration is obtained by conversion from the particle light absorption coefficient with a suitable mass absorption cross-section. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. 
"Pm10 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 10 micrometers. The absorption equivalent black carbon mass concentration is obtained by conversion from the particle light absorption coefficient with a suitable mass absorption cross-section. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm1 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 1 micrometer. The absorption equivalent black carbon mass concentration is obtained by conversion from the particle light absorption coefficient with a suitable mass absorption cross-section. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm2p5 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 2.5 micrometers. The absorption equivalent black carbon mass concentration is obtained by conversion from the particle light absorption coefficient with a suitable mass absorption cross-section. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + kg m-3 @@ -8238,6 +8427,34 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. 
A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for carbon dioxide is CO2. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. Chemically, "carbon" is the total sum of elemental, organic, and inorganic carbon. In measurements of carbonaceous aerosols, inorganic carbon is neglected and its mass is assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm10 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 10 micrometers. Chemically, "carbon" is the total sum of elemental, organic, and inorganic carbon. In measurements of carbonaceous aerosols, inorganic carbon is neglected and its mass is assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. 
"Dry aerosol particles" means aerosol particles without any water uptake. "Pm1 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 1 micrometer. Chemically, "carbon" is the total sum of elemental, organic, and inorganic carbon. In measurements of carbonaceous aerosols, inorganic carbon is neglected and its mass is assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm2p5 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 2.5 micrometers. Chemically, "carbon" is the total sum of elemental, organic, and inorganic carbon. In measurements of carbonaceous aerosols, inorganic carbon is neglected and its mass is assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + kg m-3 @@ -8350,6 +8567,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. The chemical formula of chlorophyll c3 is C36H44MgN4O7. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/CHLC03PX/2/. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. + + kg m-3 @@ -8490,6 +8714,34 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol takes up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the aerosol. "Dry aerosol particles" means aerosol particles without any water uptake. Chemically, "elemental carbon" is the carbonaceous fraction of particulate matter that is thermally stable in an inert atmosphere to high temperatures near 4000K and can only be gasified by oxidation starting at temperatures above 340 C. It is assumed to be inert and non-volatile under atmospheric conditions and insoluble in any solvent (Ogren and Charlson, 1983). + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. Chemically, "elemental carbon" is the carbonaceous fraction of particulate matter that is thermally stable in an inert atmosphere to high temperatures near 4000K and can only be gasified by oxidation starting at temperatures above 340 C. It is assumed to be inert and non-volatile under atmospheric conditions and insoluble in any solvent (Ogren and Charlson, 1983). In measurements of carbonaceous aerosols, elemental carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
"Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm10 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 10 micrometers. Chemically, "elemental carbon" is the carbonaceous fraction of particulate matter that is thermally stable in an inert atmosphere to high temperatures near 4000K and can only be gasified by oxidation starting at temperatures above 340 C. It is assumed to be inert and non-volatile under atmospheric conditions and insoluble in any solvent (Ogren and Charlson, 1983). In measurements of carbonaceous aerosols, elemental carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm1 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 1 micrometer. Chemically, "elemental carbon" is the carbonaceous fraction of particulate matter that is thermally stable in an inert atmosphere to high temperatures near 4000K and can only be gasified by oxidation starting at temperatures above 340 C. It is assumed to be inert and non-volatile under atmospheric conditions and insoluble in any solvent (Ogren and Charlson, 1983). In measurements of carbonaceous aerosols, elemental carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
"Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm2p5 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 2.5 micrometers. Chemically, "elemental carbon" is the carbonaceous fraction of particulate matter that is thermally stable in an inert atmosphere to high temperatures near 4000K and can only be gasified by oxidation starting at temperatures above 340 C. It is assumed to be inert and non-volatile under atmospheric conditions and insoluble in any solvent (Ogren and Charlson, 1983). In measurements of carbonaceous aerosols, elemental carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + kg m-3 @@ -8903,6 +9155,34 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. 
Chemically, "organic carbon aerosol" refers to the carbonaceous fraction of particulate matter contained in any of the vast number of compounds where carbon is chemically combined with hydrogen and other elements like O, S, N, P, Cl, etc. In measurements of carbonaceous aerosols, organic carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm10 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 10 micrometers. Chemically, "organic carbon aerosol" refers to the carbonaceous fraction of particulate matter contained in any of the vast number of compounds where carbon is chemically combined with hydrogen and other elements like O, S, N, P, Cl, etc. In measurements of carbonaceous aerosols, organic carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm1 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 1 micrometer. Chemically, "organic carbon aerosol" refers to the carbonaceous fraction of particulate matter contained in any of the vast number of compounds where carbon is chemically combined with hydrogen and other elements like O, S, N, P, Cl, etc. 
In measurements of carbonaceous aerosols, organic carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm2p5 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 2.5 micrometers. Chemically, "organic carbon aerosol" refers to the carbonaceous fraction of particulate matter contained in any of the vast number of compounds where carbon is chemically combined with hydrogen and other elements like O, S, N, P, Cl, etc. In measurements of carbonaceous aerosols, organic carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. + + kg m-3 @@ -10555,6 +10835,13 @@ "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic compounds as a group, as well as those for individual species. + + 1 + + + The quantity with standard name mass_ratio_of_moisture_to_dry_soil is also known as the water content of a soil or the wet-basis gravimetric moisture content. It is the ratio of the mass of water (liquid and solid) to the mass of the dried sample. The phrase "ratio_of_X_to_Y" means X/Y. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. + + s-1 @@ -10597,6 +10884,20 @@ Depth is the vertical distance below the surface. 'Undersaturation' means that a solution is unsaturated with respect to a solute. 
Calcite is a mineral that is a polymorph of calcium carbonate. The chemical formula of calcite is CaCO3. Standard names also exist for aragonite, another polymorph of calcium carbonate. The "minimum depth of undersaturation", sometimes called the "saturation horizon", is the shallowest depth at which a body of water is an undersaturated solution of a named solute. + + 1 + + + The phrase "ratio_of_X_to_Y" means X/Y. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. It is the lower limit of the water content at which a 3 mm diameter cylindrical soil sample will break in 3 to 10 mm pieces. It is the lower limit of the plastic state, which has the liquid limit as the upper bound. Known as the plastic limit. + + + + 1 + + + The phrase "ratio_of_X_to_Y" means X/Y. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. It is the lower limit of the water content at which a soil sample will flow in a viscous manner. Known as the liquid limit. + + W m-2 @@ -10863,6 +11164,13 @@ Model level number should be understood as equivalent to layer number. + + 1 + + + The modified Fosberg Fire Weather Index (mFFWI) is a measure of the potential effect of weather conditions on wildland fire. The Fosberg Fire Weather Index is a function of temperature, wind, and humidity. It is modified with a fuel availability factor based on the Keetch Byram Drought Index. + + kg m-2 @@ -11346,6 +11654,13 @@ "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved nitrogen" means the sum of all nitrogen in solution: inorganic nitrogen (nitrite, nitrate and ammonium) plus nitrogen in carbon compounds. + + mol m-3 + + + The sum of dissolved organic carbon-13 component concentrations. "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Organic carbon" describes a family of chemical species and is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. "C" means the element carbon and "13C" is the stable isotope "carbon-13", having six protons and seven neutrons. + + mol m-3 @@ -11430,6 +11745,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical symbol for mercury is Hg. 
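The "mass_concentration_of_X_in_Y" (kg m-3) and "mole_concentration_of_X_in_Y" (mol m-3) constructions defined in the surrounding entries differ only by the molar mass of the constituent X. A minimal Python sketch of that conversion follows; the helper name, the carbon dioxide example and its molar-mass value are illustrative assumptions, not part of the table or of Iris.

```python
# Illustrative sketch only: relate the "mass_concentration_of_X_in_Y" and
# "mole_concentration_of_X_in_Y" constructions via the molar mass of X.
def mass_to_mole_concentration(mass_conc_kg_m3: float, molar_mass_kg_mol: float) -> float:
    """Return mole concentration (mol m-3) from mass concentration (kg m-3)."""
    return mass_conc_kg_m3 / molar_mass_kg_mol


# Example: ~1.8e-6 kg m-3 of carbon dioxide (molar mass ~0.0440 kg mol-1).
print(mass_to_mole_concentration(1.8e-6, 0.0440))  # ~4.1e-5 mol m-3
```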
+ + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of guanosine triphosphate is C10H16N5O14P3. + + mol m-3 @@ -12165,6 +12487,20 @@ Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. The chemical formula of bromine nitrate is BrONO2. + + mol mol-1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for bromochloromethane is CH2BrCl. The IUPAC name is bromochloromethane. + + + + mol mol-1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for bromodichloromethane is CHBrCl2. The IUPAC name is bromodichloromethane. + + 1 @@ -12298,6 +12634,20 @@ "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Clox" describes a family of chemical species consisting of inorganic chlorine compounds with the exception of hydrogen chloride (HCl) and chlorine nitrate (ClONO2). "Clox" is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity with a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. "Inorganic chlorine", sometimes referred to as Cly, describes a family of chemical species which result from the degradation of source gases containing chlorine (CFCs, HCFCs, VSLS) and natural inorganic chlorine sources such as sea salt and other aerosols. Standard names that use the term "inorganic_chlorine" are used for quantities that contain all inorganic chlorine species including HCl and ClONO2. + + mol mol-1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for dibromochloromethane is CHBr2Cl. The IUPAC name is dibromochloromethane. + + + + mol mol-1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. 
A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for dibromomethane is CH2Br2. The IUPAC name is dibromomethane. + + 1 @@ -12886,6 +13236,13 @@ "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. + + mol mol-1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for tribromomethane is CHBr3. The IUPAC name is tribromomethane. + + 1 @@ -12921,6 +13278,13 @@ The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved inorganic carbon" describes a family of chemical species in solution, including carbon dioxide, carbonic acid and the carbonate and bicarbonate anions. "Dissolved inorganic carbon" is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for hydrogen peroxide is H2O2. + + mol kg-1 @@ -12942,6 +13306,13 @@ moles_of_X_per_unit_mass_inY is also called "molality" of X in Y, where X is a material constituent of Y. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for nitrous oxide is N2O. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/DN2OZZ01/. + + mol kg-1 @@ -12949,6 +13320,20 @@ moles_of_X_per_unit_mass_inY is also called "molality" of X in Y, where X is a material constituent of Y. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Particulate means suspended solids of all sizes.
Biogenic silica is a hydrated form of silica (silicon dioxide) with the chemical formula SiO2.nH2O sometimes referred to as opaline silica or opal. It is created by biological processes and in sea water it is predominantly the skeletal material of diatoms. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Particulate means suspended solids of all sizes. Particulate inorganic carbon is carbon bound in molecules ionically that may be liberated from the particles as carbon dioxide by acidification. + + mol kg-1 @@ -13166,6 +13551,41 @@ A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "shortwave" means shortwave radiation. "Upward" indicates a vector component which is positive when directed upward (negative downward). Net upward radiation is the difference between radiation from below (upwelling) and radiation from above (downwelling). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + % + + + 1000 hour fuel moisture (FM1000) represents the modelled moisture content in the dead fuels in the 3 to 8 inch diameter class and the layer of the forest floor about 4 inches below the surface. The value is based on a running 7-day average. The 1000-hour time lag fuel moisture is a function of length of day (as influenced by latitude and calendar date), daily temperature and relative humidity extremes (maximum and minimum values) and the 24-hour precipitation duration values for a 7-day period. It is a component in the US National Fire Danger Rating System. The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. It was designed for use in the continental United States, and all its components are relative, not absolute. + + + + % + + + 100 hour fuel moisture (FM100) represents the modeled moisture content of dead fuels in the 1 to 3 inch diameter class. It can also be used as a very rough estimate of the average moisture content of the forest floor from three-fourths inch to 4 inches below the surface. The 100-hour timelag fuel moisture is a function of length of day (as influenced by latitude and calendar date), maximum and minimum temperature and relative humidity, and precipitation duration in the previous 24 hours. It is a component in the US National Fire Danger Rating System. The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. It was designed for use in the continental United States, and all its components are relative, not absolute. + + + + 1 + + + The Burning Index (BI) is a numeric value closely related to the flame length in feet multiplied by 10, which is related to the contribution of fire behaviour to the effort of containing a fire. The BI is a function of fire spread and fire intensity and is derived from a combination of Spread and Energy Release Components. 
The Spread Component is a rating of the forward rate of spread of a head fire and wind is a key input. The scale is open ended which allows the range of numbers to adequately define fire problems, even in time of low to moderate fire danger. Computed BI values represent the near upper limit to be expected on the rating area. In other words, if a fire occurs in the worst fuel, weather and topography conditions of the rating area, these numbers indicate its expected fire line intensities and flame length. It is an index in the US National Fire Danger Rating System. The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. It was designed for use in the continental United States, and all its components are relative, not absolute. + + + + J m-2 + + + The Energy Release Component (ERC) is a number related to the available energy per unit area within the flaming front at the head of a fire. It is usually given in BTU ft-2. Daily variations in ERC are due to changes in moisture content of the various fuels present, both live and dead. It may also be considered a composite fuel moisture value as it reflects the contribution that all live and dead fuels have to potential fire intensity. Energy Release Component is a cumulative index. The scale is open-ended and relative. Energy Release Component values depend on the fuel model input into the calculations and interpretation of precise values varies with ecology and region. It is an index in the US National Fire Danger Rating System. The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. It was designed for use in the continental United States, and all its components are relative, not absolute. + + + + 1 + + + Severe Fire Danger Index (SFDI) is the normalized product of normalized Energy Release Component (ERC) and normalized Burning Index (BI) from the United States National Fire Danger Rating System (NFDRS). While SFDI is not officially part of the National Fire Danger Rating System, it is related to and intended to supplement NFDRS. It is commonly categorized into five classes based on percentile: low (0-60), moderate (60-80), high (80-90), very high (90-97), and extreme (97-100). It can be extended to future conditions by introducing an unprecedented category for values above the historical 100th percentile. As it is locally normalized, its interpretation remains the same across space. + + 1 @@ -13614,6 +14034,13 @@ "Number concentration" means the number of particles or other specified objects per unit volume. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "stp" means standard temperature (0 degC) and pressure (101325 Pa). The surface called "surface" means the lower boundary of the atmosphere. + + m-3 + + + "Number concentration" means the number of particles or other specified objects per unit volume. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. + + m-3 @@ -13628,6 +14055,13 @@ "Number concentration" means the number of particles or other specified objects per unit volume. 
"Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + + m-3 + + + "Number concentration" means the number of particles or other specified objects per unit volume. "Pollen grain" refers to the male gametophyte of seed plants (either angiosperms or gymnosperms). The number concentration of pollen grains refers to the number of individual pollen grains per unit volume. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + + m-3 @@ -13635,6 +14069,13 @@ The cloud condensation nuclei number concentration is the total number of aerosol particles per unit volume independent of and integrated over particle size that act as condensation nuclei for liquid-phase clouds. A coordinate variable with the standard name of relative_humidity should be specified to indicate that the property refers to a specific supersaturation with respect to liquid water. The ability of a particle to act as a condensation nucleus is determined by its size, chemical composition, and morphology. "stp" means standard temperature (0 degC) and pressure (101325 Pa). + + m-3 + + + "Number concentration" means the number of particles or other specified objects per unit volume. The cloud condensation nuclei number concentration is the total number of aerosol particles per unit volume independent of and integrated over particle size that act as condensation nuclei for liquid-phase clouds. A coordinate variable with the standard name of relative_humidity should be specified to indicate that the property refers to a specific supersaturation with respect to liquid water. The ability of a particle to act as a condensation nucleus is determined by its size, chemical composition, and morphology. + + m-3 @@ -13768,6 +14209,34 @@ A variable with the standard name of number_of_observations contains the number of discrete observations or measurements from which the values of another data variable have been derived. The linkage between the data variable and the variable with a standard_name of number_of_observations is achieved using the ancillary_variables attribute. + + m-3 + + + The aerosol particle number size distribution is the number concentration of aerosol particles as a function of particle diameter. A coordinate variable with the standard name of electrical_mobility_particle_diameter, aerodynamic_particle_diameter, or optical_particle_diameter should be specified to indicate that the property applies at specific particle sizes selected by the indicated method. 
To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. "log10_X" means common logarithm (i.e. base 10) of X. "stp" means standard temperature (0 degC) and pressure (101325 Pa). + + + + m-3 + + + The aerosol particle number size distribution is the number concentration of aerosol particles as a function of particle diameter. A coordinate variable with the standard name of electrical_mobility_particle_diameter, aerodynamic_particle_diameter, or optical_particle_diameter should be specified to indicate that the property applies at specific particle sizes selected by the indicated method. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. + + + + m-3 + + + The cloud condensation nuclei number size distribution is the number concentration of aerosol particles as a function of particle diameter, where the particle acts as condensation nucleus for liquid-phase clouds. A coordinate variable with the standard name of relative_humidity should be specified to indicate that the property refers to a specific supersaturation with respect to liquid water. A coordinate variable with the standard name of electrical_mobility_particle_diameter should be specified to indicate that the property applies at specific mobility particle sizes. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. The ability of a particle to act as a condensation nucleus is determined by its size, chemical composition, and morphology. "stp" means standard temperature (0 degC) and pressure (101325 Pa). + + + + m-3 + + + The cloud condensation nuclei number size distribution is the number concentration of aerosol particles as a function of particle diameter, where the particle acts as condensation nucleus for liquid-phase clouds. A coordinate variable with the standard name of relative_humidity should be specified to indicate that the property refers to a specific supersaturation with respect to liquid water. A coordinate variable with the standard name of electrical_mobility_particle_diameter should be specified to indicate that the property applies at specific mobility particle sizes. To specify the relative humidity at which the particle sizes were selected, provide a scalar coordinate variable with the standard name of relative_humidity_for_aerosol_particle_size_selection. The ability of a particle to act as a condensation nucleus is determined by its size, chemical composition, and morphology. + + kg s-1 @@ -14342,6 +14811,13 @@ The partial pressure of a dissolved gas in sea water is the partial pressure in air with which it would be in equilibrium. The partial pressure of a gaseous constituent of air is the pressure that it would exert if all other gaseous constituents were removed, assuming the volume, the temperature, and its number of moles remain unchanged. The chemical formula for methane is CH4. + + degree_C + + + Perceived temperature (PT) is an equivalent air temperature of the actual thermal condition. 
It is the air temperature of a reference condition causing the same thermal perception in a human body considering air temperature, wind speed, humidity, solar and thermal radiation as well as clothing and activity level. It is not the perceived air temperature, which derives from either wind chill or the heat index and has the standard_name apparent_air_temperature. + + m @@ -14398,6 +14874,13 @@ "Photolysis" is a chemical reaction in which a chemical compound is broken down by photons. The "reaction rate" is the rate at which the reactants of a chemical reaction form the products. The chemical formula for ozone is O3. The IUPAC name for ozone is trioxygen. "1D oxygen atom" means the singlet D state, an excited state, of the oxygen atom. The combined photolysis rate of ozone to both excited and ground state oxygen atoms has the standard name photolysis_rate_of_ozone. + + degree_C + + + Physiological equivalent temperature (PET) is an equivalent air temperature of the actual thermal condition. It is the air temperature of a reference condition without wind and solar radiation at which the heat budget of the human body is balanced with the same core and skin temperature. Note that PET here is not potential evapotranspiration. + + 1 @@ -17583,6 +18066,13 @@ "Radioactivity" means the number of radioactive decays of a material per second. "Radioactivity concentration" means radioactivity per unit volume of the medium. "Tc" means the element "technetium" and "99Tc" is the isotope "technetium-99" with a half-life of 7.79e+07 days. + + s + + + The quantity with standard name radio_signal_roundtrip_travel_time_in_air is the time taken for an electromagnetic signal to propagate from an emitting instrument such as a radar or lidar to a reflecting volume and back again. The signal returned to the instrument is the sum of all scattering from a given volume of air regardless of mechanism (examples are scattering by aerosols, hydrometeors and refractive index irregularities, or whatever else the instrument detects). + + m @@ -17660,6 +18150,13 @@ The quantity with standard name ratio_of_sea_water_practical_salinity_anomaly_to_relaxation_timescale is a correction term applied to modelled sea water practical salinity. The term is estimated as the deviation of model local sea water practical salinity from an observation-based climatology (e.g. World Ocean Database) weighted by a user-specified relaxation coefficient in s-1 (1/(relaxation timescale)). The phrase "ratio_of_X_to_Y" means X/Y. The term "anomaly" means difference from climatology. Practical Salinity, S_P, is a determination of the salinity of sea water, based on its electrical conductance. The measured conductance, corrected for temperature and pressure, is compared to the conductance of a standard potassium chloride solution, producing a value on the Practical Salinity Scale of 1978 (PSS-78). This name should not be used to describe salinity observations made before 1978, or ones not based on conductance measurements. Conversion of Practical Salinity to other precisely defined salinity measures should use the appropriate formulas specified by TEOS-10. Other standard names for precisely defined salinity quantities are sea_water_absolute_salinity (S_A); sea_water_preformed_salinity (S_*), sea_water_reference_salinity (S_R); sea_water_cox_salinity (S_C), used for salinity observations between 1967 and 1977; and sea_water_knudsen_salinity (S_K), used for salinity observations between 1901 and 1966.
Salinity quantities that do not match any of the precise definitions should be given the more general standard name of sea_water_salinity. Reference: www.teos-10.org; Lewis, 1980 doi:10.1109/JOE.1980.1145448. + + sr + + + The ratio of volume extinction coefficient to volume backwards scattering coefficient by ranging instrument in air due to ambient aerosol particles (often called "lidar ratio") is the ratio of the "volume extinction coefficient" and the "volume backwards scattering coefficient of radiative flux by ranging instrument in air due to ambient aerosol particles". The ratio is assumed to be related to the same wavelength as the incident radiation. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + m s-2 @@ -17681,6 +18178,13 @@ Realization is used to label a dimension that can be thought of as a statistical sample, e.g., labelling members of a model ensemble. + + W + + + The quantity with standard name received_power_of_radio_wave_in_air_scattered_by_air refers to the received power of the signal at an instrument such as a radar or lidar. The signal returned to the instrument is the sum of all scattering from a given volume of air regardless of mechanism (examples are scattering by aerosols, hydrometeors and refractive index irregularities, or whatever else the instrument detects). + + Pa @@ -18262,6 +18766,13 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Air pressure at low frequency" means variations in air pressure with periods longer than 20 days. These give rise to corresponding variations in sea surface topography. The quantity sea_surface_height_correction_due_to_air_pressure_at_low_frequency is commonly called the "inverted barometer effect" and the correction should be applied by adding it to the quantity with standard name altimeter_range. Additional altimeter range corrections are given by the quantities with standard names altimeter_range_correction_due_to_wet_troposphere, altimeter_range_correction_due_to_dry_troposphere, altimeter_range_correction_due_to_ionosphere and sea_surface_height_correction_due_to_air_pressure_and_wind_at_high_frequency. + + m + + + Significant wave height is a statistic computed from wave measurements and corresponds to the average height of the highest one third of the waves, where the height is defined as the vertical distance from a wave trough to the following wave crest. Infragravity waves are waves occurring in the frequency range 0.04 to 0.004 s^-1 (wave periods of 25 to 250 seconds). + + 1 @@ -18563,6 +19074,13 @@ The wave directional spectrum can be written as a five dimensional function S(t,x,y,f,theta) where t is time, x and y are horizontal coordinates (such as longitude and latitude), f is frequency and theta is direction. S has the standard name sea_surface_wave_directional_variance_spectral_density. 
S can be integrated over direction to give S1= integral(S dtheta) and this quantity has the standard name sea_surface_wave_variance_spectral_density. The quantity with standard name sea_surface_wave_energy_at_variance_spectral_density_maximum, sometimes called peak wave energy, is the maximum value of the variance spectral density (max(S1)). + + s-1 + + + Frequency is the number of oscillations of a wave per unit time. The sea_surface_wave_frequency_at_variance_spectral_density_maximum is the frequency of the most energetic waves in the total wave spectrum at a specific location. The wave directional spectrum can be written as a five dimensional function S(t,x,y,f,theta) where t is time, x and y are horizontal coordinates (such as longitude and latitude), f is frequency and theta is direction. S has the standard name sea_surface_wave_directional_variance_spectral_density. S can be integrated over direction to give S1= integral(S dtheta) and this quantity has the standard name sea_surface_wave_variance_spectral_density. + + degree @@ -18682,6 +19200,13 @@ Wave slope describes an aspect of sea surface wave geometry related to sea surface roughness. Mean square slope describes a derivation over multiple waves within a sea-state, for example calculated from moments of the wave directional spectrum. The phrase "y_slope" indicates that slope values are derived from vector components along the grid y-axis. + + m + + + The wave directional spectrum can be written as a five dimensional function S(t,x,y,k,theta) where t is time, x and y are horizontal coordinates (such as longitude and latitude), k is wavenumber and theta is direction. S has the standard name sea_surface_wave_directional_variance_spectral_density. S can be integrated over direction to give S1= integral(S dtheta) and this quantity has the standard name sea_surface_wave_variance_spectral_density. Wavenumber is the number of oscillations of a wave per unit distance. Wavenumber moments, M(n) of S1 can then be calculated as follows: M(n) = integral(S1 k^n dk), where k^n is k to the power of n. The inverse wave wavenumber, k(m-1), is calculated as the ratio M(-1)/M(0). The wavelength is the horizontal distance between repeated features on the waveform such as crests, troughs or upward passes through the mean level. + + m-1 @@ -18693,7 +19218,7 @@ s - A period is an interval of time, or the time-period of an oscillation. The sea_surface_wave_period_at_variance_spectral_density_maximum, sometimes called peak wave period, is the period of the most energetic waves in the total wave spectrum at a specific location. + A period is an interval of time, or the time-period of an oscillation. Wave period is the interval of time between repeated features on the waveform such as crests, troughs or upward passes through the mean level. The sea_surface_wave_period_at_variance_spectral_density_maximum, sometimes called peak wave period, is the period of the most energetic waves in the total wave spectrum at a specific location. The wave directional spectrum can be written as a five dimensional function S(t,x,y,f,theta) where t is time, x and y are horizontal coordinates (such as longitude and latitude), f is frequency and theta is direction. S has the standard name sea_surface_wave_directional_variance_spectral_density. S can be integrated over direction to give S1= integral(S dtheta) and this quantity has the standard name sea_surface_wave_variance_spectral_density. 
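The wave-spectrum entries above spell out the arithmetic explicitly: S1 = integral(S dtheta), wavenumber moments M(n) = integral(S1 k^n dk), and the inverse wavenumber M(-1)/M(0), while the "at_variance_spectral_density_maximum" names refer to the location of the spectral peak. A minimal numpy sketch of those integrals follows; the synthetic spectrum and helper names are assumptions for illustration, not Iris or CF code.

```python
# Illustrative sketch only: wavenumber moments of a 1-D variance spectral
# density S1(k), following the recipe in the entries above.
import numpy as np


def wavenumber_moment(k, s1, n):
    """M(n) = integral(S1 * k**n dk), approximated with the trapezoidal rule."""
    return np.trapz(s1 * k ** n, k)


k = np.linspace(0.01, 1.0, 200)          # wavenumber (m-1), synthetic values
s1 = np.exp(-((k - 0.1) / 0.05) ** 2)    # variance spectral density, synthetic values

inverse_wavenumber = wavenumber_moment(k, s1, -1) / wavenumber_moment(k, s1, 0)
peak_wavenumber = k[np.argmax(s1)]       # analogue of the spectral-density maximum
print(inverse_wavenumber, peak_wavenumber)
```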
@@ -18948,6 +19473,13 @@ + + S m-1 + + + The electrical conductivity of sea water in a sample measured at a defined reference temperature. The reference temperature should be recorded in a scalar coordinate variable, or a coordinate variable with a single dimension of size one, and the standard name of temperature_of_analysis_of_sea_water. This quantity is sometimes called 'specific conductivity' when the reference temperature is 25 degrees Celsius. + + 1e-3 @@ -19361,6 +19893,20 @@ Convective precipitation is that produced by the convection schemes in an atmosphere model. Some atmosphere models differentiate between shallow and deep convection. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + Pa + + + Shear strength is the amount of force applied to a normal plane required to bring a frozen soil to failure along a tangential plane. Shear strength depends on the angle of friction and cohesion of the soil. + + + + Pa + + + Shear strength is the amount of force applied to a normal plane required to bring the soil to failure along a tangential plane. Shear strength depends on the angle of friction and cohesion of the soil. + + 1 @@ -19375,6 +19921,48 @@ "Single scattering albedo" is the fraction of radiation in an incident light beam scattered by the particles of an aerosol reference volume for a given wavelength. It is the ratio of the scattering and the extinction coefficients of the aerosol particles in the reference volume. A coordinate variable with a standard name of radiation_wavelength or radiation_frequency should be included to specify either the wavelength or frequency. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + kg m-2 s-1 + + + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Sinking" is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Particulate means suspended solids of all sizes. Biogenic silica is a hydrated form of silica (silicon dioxide) with the chemical formula SiO2.nH2O sometimes referred to as opaline silica or opal. It is created by biological processes and in sea water it is predominantly the skeletal material of diatoms. + + + + kg m-2 s-1 + + + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics.
"Sinking" is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Particulate means suspended solids of all sizes. + + + + kg m-2 s-1 + + + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Sinking" is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Particulate means suspended solids of all sizes. Particulate inorganic carbon is carbon bound in molecules ionically that may be liberated from the particles as carbon dioxide by acidification. + + + + kg m-2 s-1 + + + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Sinking" is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. + + + + kg m-2 s-1 + + + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Sinking" is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Particulate means suspended solids of all sizes. + + + + kg m-2 s-1 + + + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Sinking" is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Particulate means suspended solids of all sizes. + + mol m-2 s-1 @@ -19473,6 +20061,13 @@ Soil albedo is the albedo of the soil surface assuming no snow. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. + + kg m-3 + + + The density of the soil in its natural condition. Also known as bulk density. The density of a substance is its mass per unit volume. + + kg m-2 @@ -19606,6 +20201,13 @@ soil_water_ph is the measure of acidity of soil moisture, defined as the negative logarithm of the concentration of dissolved hydrogen ions in soil water. + + 1e-3 + + + The quantity with standard name soil_water_salinity is the salt content of soil water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. + + degree @@ -19809,6 +20411,13 @@ "Specific" means per unit mass. "Turbulent kinetic energy" is the kinetic energy of chaotic fluctuations of the fluid flow. + + Hz + + + The quantity with standard name spectral_width_of_radio_wave_in_air_scattered_by_air is the frequency width of the signal received by an instrument such as a radar or lidar. The signal returned to the instrument is the sum of all scattering from a given volume of air regardless of mechanism (examples are scattering by aerosols, hydrometeors and refractive index irregularities, or whatever else the instrument detects). 
+ + m s-1 @@ -19949,6 +20558,13 @@ "Upward" indicates a vector component which is positive when directed upward (negative downward). Ocean transport means transport by all processes, both sea water and sea ice. "square_of_X" means X*X. + + K + + + In thermodynamics and fluid mechanics, stagnation temperature is the temperature at a stagnation point in a fluid flow. At a stagnation point the speed of the fluid is zero and all of the kinetic energy has been converted to internal energy and is added to the local static enthalpy. In both compressible and incompressible fluid flow, the stagnation temperature is equal to the total temperature at all points on the streamline leading to the stagnation point. In aviation, stagnation temperature is known as total air temperature and is measured by a temperature probe mounted on the surface of the aircraft. The probe is designed to bring the air to rest relative to the aircraft. As the air is brought to rest, kinetic energy is converted to internal energy. The air is compressed and experiences an adiabatic increase in temperature. Therefore, total air temperature is higher than the static (or ambient) air temperature. Total air temperature is an essential input to an air data computer in order to enable computation of static air temperature and hence true airspeed. + + 1 @@ -20383,6 +20999,13 @@ The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. In ocean biogeochemistry models, a "natural analogue" is used to simulate the effect on a modelled variable of imposing preindustrial atmospheric carbon dioxide concentrations, even when the model as a whole may be subjected to varying forcings. The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The chemical formula for carbon dioxide is CO2. + + kg m-2 s-1 + + + The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for methane is CH4. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Non-wetland soils are all soils except for wetlands. Wetlands are areas where water covers the soil, or is present either at or near the surface of the soil all year or for varying periods of time during the year, including during the growing season. The precise conditions under which non-wetland soils produce and consume methane can vary between models. + + kg m-2 s-1 @@ -23596,6 +24219,34 @@ The surface called "surface" means the lower boundary of the atmosphere. Runoff is the liquid water which drains from land. If not specified, "runoff" refers to the sum of surface runoff and subsurface drainage. 
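The stagnation temperature entry above reasons that bringing the flow to rest converts kinetic energy into enthalpy, so total air temperature exceeds static air temperature and can be inverted by an air data computer to recover static temperature and true airspeed. A minimal sketch of that arithmetic follows, assuming dry air as a calorically perfect gas (gamma = 1.4, R = 287.05 J kg-1 K-1) and an ideal probe with a recovery factor of 1; these constants and the sample inputs are assumptions, not part of the table.

```python
import math

GAMMA = 1.4
R_DRY_AIR = 287.05                          # J kg-1 K-1
CP = GAMMA * R_DRY_AIR / (GAMMA - 1.0)      # ~1004.7 J kg-1 K-1

def total_air_temperature(static_temperature_k, true_airspeed_ms):
    """Total (stagnation) temperature: kinetic energy added to static enthalpy."""
    return static_temperature_k + true_airspeed_ms ** 2 / (2.0 * CP)

def static_air_temperature(total_temperature_k, mach_number):
    """Invert T0 = T * (1 + (gamma - 1)/2 * M**2), as an air data computer does."""
    return total_temperature_k / (1.0 + 0.5 * (GAMMA - 1.0) * mach_number ** 2)

def true_airspeed(total_temperature_k, mach_number):
    """True airspeed from Mach number and the recovered static temperature."""
    t_static = static_air_temperature(total_temperature_k, mach_number)
    return mach_number * math.sqrt(GAMMA * R_DRY_AIR * t_static)

# Example: Mach 0.78 with a measured total air temperature of 245 K.
print(static_air_temperature(245.0, 0.78))   # ~218 K
print(true_airspeed(245.0, 0.78))            # ~231 m s-1
```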
In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + m s-1 + + + A velocity is a vector quantity. "x" indicates a vector component along the grid x-axis, positive with increasing x. Ocean currents are related to phenomena of different nature and processes, such as density currents, currents raised by the wind, tide, wave propagation, mass flow in estuaries, etc. This standard name refers to the sum of currents of all origins. + + + + m s-1 + + + A velocity is a vector quantity. "x" indicates a vector component along the grid x-axis, positive with increasing x. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Tides are the rise and fall of sea levels caused by the combined effects of the gravitational forces exerted by the Moon and the Sun, and the rotation of the Earth. This rise in water level is accompanied by a horizontal movement of water called the tidal current. + + + + m s-1 + + + A velocity is a vector quantity. "y" indicates a vector component along the grid y-axis, positive with increasing y. Ocean currents are related to phenomena of different nature and processes, such as density currents, currents raised by the wind, tide, wave propagation, mass flow in estuaries, etc. This Standard Name refers to the sum of currents of all origins. + + + + m s-1 + + + A velocity is a vector quantity. "y" indicates a vector component along the grid y-axis, positive with increasing y. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Tides are the rise and fall of sea levels caused by the combined effects of the gravitational forces exerted by the Moon and the Sun, and the rotation of the Earth. This rise in water level is accompanied by a horizontal movement of water called the tidal current. + + kg m-2 65 @@ -23743,6 +24394,13 @@ The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). The surface latent heat flux is the exchange of heat between the surface and the air on account of evaporation (including sublimation). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + W m-2 + + + The quantity with standard name surface_upward_latent_heat_flux_due_to_evaporation does not include transpiration from vegetation. The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation"). The surface latent heat flux is the exchange of heat between the surface and the air on account of evaporation (including sublimation). 
+ + W m-2 @@ -23932,6 +24590,28 @@ "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Heterotrophic respiration is respiration by heterotrophs ("consumers"), which are organisms (including animals and decomposers) that consume other organisms or dead organic material, rather than synthesising organic material from inorganic precursors using energy from the environment (especially sunlight) as autotrophs ("producers") do. Heterotrophic respiration goes on within both the soil and litter pools. + + kg m-2 s-1 + + + Methane emitted from the surface, generated by biomass burning (fires). Positive direction upwards. +The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for methane is CH4. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. the surface of the earth). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The term "fires" means all biomass fires, whether naturally occurring or ignited by humans. The precise conditions under which fires produce and consume methane can vary between models. + + + + kg m-2 s-1 + + + The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for methane is CH4. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. the surface of the earth). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Herbivores are animals that feed on vegetation. Mammals are any vertebrates within the class Mammalia. Examples of large herbivorous mammals include cows, elks, and buffalos. These animals eat grass, tree bark, aquatic vegetation, and shrubby growth. Herbivores can also be medium-sized animals such as sheep and goats, which eat shrubby vegetation and grasses. Small herbivores include rabbits, chipmunks, squirrels, and mice. The precise conditions under which herbivorous mammals produce and consume methane can vary between models. + + + + kg m-2 s-1 + + + The surface called "surface" means the lower boundary of the atmosphere. 
"Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for methane is CH4. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. the surface of the earth). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Termites belong to any of a group of cellulose-eating insects, the social system of which shows remarkable parallels with those of ants and bees, although it has evolved independently. The precise conditions under which termites produce and consume methane can vary between models. + + kg m-2 s-1 @@ -30120,6 +30800,13 @@ "Amount" means mass per unit area. The construction thickness_of_[X_]snowfall_amount means the accumulated "depth" of snow which fell i.e. the thickness of the layer of snow at its own density. There are corresponding standard names for liquid water equivalent (lwe) thickness. + + m + + + Depth or height of the organic soil horizon (O or H horizons per the World Reference Base soil classification system), measured from the soil surface down to the mineral horizon. Organic layers are commonly composed of a succession of litter of recognizable origin, of partly decomposed litter, and of highly decomposed (humic) organic material. + + m @@ -30509,14 +31196,14 @@ kg m-2 - "Amount" means mass per unit area. + "Amount" means mass per unit area. Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. kg m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. @@ -30701,6 +31388,13 @@ The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. 
"Overcast" means a fractional sky cover of 95% or more when at least a portion of this amount is attributable to clouds or obscuring phenomena (such as haze, dust, smoke, fog, etc.) aloft. (Reference: AMS Glossary: http://glossary.ametsoc.org/wiki/Main_Page). Standard names are also defined for the quantities ultraviolet_index and ultraviolet_index_assuming_clear_sky. + + degree_C + + + Universal Thermal Comfort Index (UTCI) is an equivalent temperature of the actual thermal condition. Reference: utci.org. It is the air temperature of a reference condition causing the same dynamic physiological response in a human body considering its energy budget, physiology and clothing adaptation. + + m s-1 40 @@ -30820,6 +31514,13 @@ "Upward" indicates a vector component which is positive when directed upward (negative downward). The latent heat flux is the exchange of heat across a surface on account of evaporation and condensation (including sublimation and deposition). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + W m-2 + + + "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. The latent heat flux due to transpiration is the release of latent heat from plant surfaces to the air due to the release of water vapor. + + kg m-2 s-1 @@ -31023,6 +31724,13 @@ The vertical_component_of_ocean_xy_tracer_diffusivity means the vertical component of the diffusivity of tracers in the ocean due to lateral mixing. This quantity could appear in formulations of lateral diffusivity in which "lateral" does not mean "iso-level", e.g. it would not be used for isopycnal diffusivity. "Tracer diffusivity" means the diffusivity of heat and salinity due to motion which is not resolved on the grid scale of the model. + + kg m-2 + + + “Drainage” is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. The vertical drainage amount in soil is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. + + m @@ -31142,6 +31850,13 @@ The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with specific_ instead of volume_. Backwards scattering refers to the sum of scattering into all backward angles i.e. scattering_angle exceeds pi/2 radians. A scattering_angle should not be specified with this quantity. The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths, unless a coordinate of radiation_wavelength is included to specify the wavelength. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. 
"Dried_aerosol" means that the aerosol sample has been dried from the ambient state, but that the dry state (relative humidity less than 40 per cent) has not necessarily been reached. To specify the relative humidity at which the sample was measured, provide a scalar coordinate variable with the standard name of "relative_humidity". The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + m-1 sr-1 + + + Volume backwards scattering coefficient by ranging instrument is the fraction of radiative flux, per unit path length and per unit solid angle, scattered at 180 degrees angle respect to the incident radiation and obtained through ranging techniques like lidar and radar. Backwards scattering coefficient is assumed to be related to the same wavelength of incident radiation. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + m-1 @@ -31163,6 +31878,13 @@ Radiative flux is the sum of shortwave and longwave radiative fluxes. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths, unless a coordinate of radiation_wavelength is included to specify the wavelength. Attenuation is the sum of absorption and scattering. Attenuation is sometimes called "extinction". Beam attenuation refers to the decrease of radiative flux along the direction of the incident path. It is distinguished from attenuation of the downwelling component of radiative flux from any incident direction, also called "diffuse" attenuation. The phrase "corrected for pure water attenuance" means the attenuation coefficient has been adjusted/calibrated to remove the influence of absorption/scattering by the water itself. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with specific_ instead of volume_. + + 1 + + + The volume extinction Angstrom exponent is the Angstrom exponent obtained for the aerosol extinction instead that for the aerosol optical thickness. It is alpha in the following equation relating aerosol extinction (ext) at the wavelength lambda to aerosol extinction at a different wavelength lambda0: ext(lambda) = ext(lambda0) * [lambda/lambda0] ** (-1 * alpha). "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + m-1 @@ -31317,11 +32039,18 @@ "Water" means water in all phases. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + kg m-2 + + + "Evapotranspiration" means all water vapor fluxes into the atmosphere from the surface: liquid evaporation, sublimation, and transpiration. "Amount" means mass per unit area. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. + + kg m-2 s-1 evspsbl - Water means water in all phases. "Evapotranspiration" means all water vapor fluxes into the atmosphere from the surface: liquid evaporation, sublimation and transpiration. Evaporation is the conversion of liquid or solid into vapor. Transpiration is the process by which water is carried from the roots of plants and evaporates from the stomata. (The conversion of solid alone into vapor is called "sublimation".) In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. + Water means water in all phases. "Evapotranspiration" means all water vapor fluxes into the atmosphere from the surface: liquid evaporation, sublimation and transpiration. Evaporation is the conversion of liquid or solid into vapor. Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. (The conversion of solid alone into vapor is called "sublimation".) In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. @@ -31429,6 +32158,13 @@ "Water" means water in all phases. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) Potential evaporation is the rate at which evaporation would take place under unaltered ambient conditions (temperature, relative humidity, wind, etc.) if the supply of water were unlimited, as if from an open water surface. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + kg m-2 + + + Potential evapotranspiration is the rate at which evapotranspiration would occur under ambient conditions from a uniformly vegetated area when the water supply is not limiting. "Evapotranspiration" means all water vapor fluxes into the atmosphere from the surface: liquid evaporation, sublimation and transpiration. Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. Evaporation is the conversion of liquid or solid into vapor. 
(The conversion of solid alone into vapor is called "sublimation"). Amount means mass per unit area. + + kg m-2 s-1 @@ -31654,6 +32390,10 @@ + + moles_of_particulate_inorganic_carbon_per_unit_mass_in_sea_water + + temperature_in_ground @@ -31662,6 +32402,62 @@ biological_taxon_lsid + + tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition + + + + lagrangian_tendency_of_atmosphere_sigma_coordinate + + + + lagrangian_tendency_of_atmosphere_sigma_coordinate + + + + electrical_mobility_diameter_of_ambient_aerosol_particles + + + + diameter_of_ambient_aerosol_particles + + + + mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air + + + + effective_radius_of_stratiform_cloud_rain_particles + + + + effective_radius_of_stratiform_cloud_ice_particles + + + + effective_radius_of_stratiform_cloud_graupel_particles + + + + effective_radius_of_convective_cloud_snow_particles + + + + effective_radius_of_convective_cloud_rain_particles + + + + effective_radius_of_convective_cloud_ice_particles + + + + histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid + + + + backscattering_ratio_in_air + + soot_content_of_surface_snow @@ -31690,86 +32486,6 @@ integral_wrt_time_of_surface_downward_northward_stress - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - - - surface_snow_density - - - - atmosphere_upward_relative_vorticity - - - - atmosphere_upward_absolute_vorticity - - - - area_type - - - - area_type - - - - mass_fraction_of_liquid_precipitation_in_air - - - - mass_fraction_of_liquid_precipitation_in_air - - - - tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - - - nitrogen_growth_limitation_of_diazotrophic_phytoplankton - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - - - mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water - - - - mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water - - - - iron_growth_limitation_of_diazotrophic_phytoplankton - - - - growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance - - - - air_pseudo_equivalent_potential_temperature - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - sea_water_velocity_from_direction @@ -31842,368 +32558,272 @@ tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing - - effective_radius_of_stratiform_cloud_snow_particles - - - - tendency_of_atmosphere_moles_of_cfc11 - - - - moles_of_cfc11_per_unit_mass_in_sea_water - - - - atmosphere_moles_of_cfc11 - - - - tendency_of_atmosphere_moles_of_cfc113 - - - - atmosphere_moles_of_cfc113 - - - - tendency_of_atmosphere_moles_of_cfc114 - - - - atmosphere_moles_of_cfc114 - - - - tendency_of_atmosphere_moles_of_cfc115 - - - - atmosphere_moles_of_cfc115 - - - - tendency_of_atmosphere_moles_of_cfc12 - - - - atmosphere_moles_of_cfc12 - - - - tendency_of_atmosphere_moles_of_halon1202 - - - - atmosphere_moles_of_halon1202 - - - - 
tendency_of_atmosphere_moles_of_halon1211 - - - - atmosphere_moles_of_halon1211 - - - - tendency_of_atmosphere_moles_of_halon1301 - - - - atmosphere_moles_of_halon1301 - - - - tendency_of_atmosphere_moles_of_halon2402 - - - - atmosphere_moles_of_halon2402 - - - - tendency_of_atmosphere_moles_of_hcc140a - - - - atmosphere_moles_of_hcc140a - - - - tendency_of_troposphere_moles_of_hcc140a - - - - tendency_of_middle_atmosphere_moles_of_hcc140a - - - - tendency_of_troposphere_moles_of_hcfc22 + + rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc - - tendency_of_atmosphere_moles_of_hcfc22 + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton - - atmosphere_moles_of_hcfc22 + + mole_fraction_of_inorganic_bromine_in_air - - tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition + + water_vapor_saturation_deficit_in_air - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + tendency_of_atmosphere_moles_of_carbon_tetrachloride - - electrical_mobility_diameter_of_ambient_aerosol_particles + + tendency_of_atmosphere_moles_of_carbon_monoxide - - diameter_of_ambient_aerosol_particles + + platform_yaw - - mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air + + platform_pitch - - effective_radius_of_stratiform_cloud_rain_particles + + platform_roll - - effective_radius_of_stratiform_cloud_ice_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization - - effective_radius_of_stratiform_cloud_graupel_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton - - effective_radius_of_convective_cloud_snow_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton - - effective_radius_of_convective_cloud_rain_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms - - effective_radius_of_convective_cloud_ice_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton - - histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid + + mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water - - backscattering_ratio_in_air + + tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes - - product_of_northward_wind_and_lagrangian_tendency_of_air_pressure + + tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes - - product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction - - carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles - - floating_ice_shelf_area_fraction + + mole_fraction_of_noy_expressed_as_nitrogen_in_air - - atmosphere_moles_of_carbon_tetrachloride + + tendency_of_atmosphere_moles_of_methane - - mole_fraction_of_methylglyoxal_in_air + + tendency_of_specific_humidity_due_to_stratiform_precipitation - - mole_fraction_of_dichlorine_peroxide_in_air + + tendency_of_air_temperature_due_to_stratiform_precipitation - - atmosphere_mass_content_of_convective_cloud_liquid_water + + stratiform_precipitation_flux - - 
effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top + + stratiform_precipitation_amount - - air_equivalent_temperature + + lwe_thickness_of_stratiform_precipitation_amount - - air_pseudo_equivalent_temperature + + lwe_stratiform_precipitation_rate - - mass_content_of_cloud_liquid_water_in_atmosphere_layer + + water_evaporation_amount_from_canopy - - air_equivalent_potential_temperature + + water_evaporation_flux_from_canopy - - number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + precipitation_flux_onto_canopy - - number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top + + outgoing_water_volume_transport_along_river_channel - - effective_radius_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice - - effective_radius_of_stratiform_cloud_liquid_water_particles + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission - - effective_radius_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top + + mass_fraction_of_mercury_dry_aerosol_particles_in_air - - effective_radius_of_convective_cloud_liquid_water_particles + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - effective_radius_of_cloud_liquid_water_particles + + surface_snow_density - - atmosphere_mass_content_of_cloud_liquid_water + + atmosphere_upward_relative_vorticity - - mole_fraction_of_noy_expressed_as_nitrogen_in_air + + atmosphere_upward_absolute_vorticity - - tendency_of_atmosphere_moles_of_methane + + area_type - - rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + area_type - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton + + mass_fraction_of_liquid_precipitation_in_air - - mole_fraction_of_inorganic_bromine_in_air + + mass_fraction_of_liquid_precipitation_in_air - - water_vapor_saturation_deficit_in_air + + tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + nitrogen_growth_limitation_of_diazotrophic_phytoplankton - - tendency_of_atmosphere_moles_of_carbon_tetrachloride + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - tendency_of_atmosphere_moles_of_carbon_monoxide + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - platform_yaw + + mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water - - platform_pitch + + mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water - - platform_roll + + iron_growth_limitation_of_diazotrophic_phytoplankton - - tendency_of_specific_humidity_due_to_stratiform_precipitation + + growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance - - tendency_of_air_temperature_due_to_stratiform_precipitation + + air_pseudo_equivalent_potential_temperature - - stratiform_precipitation_flux + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water - - stratiform_precipitation_amount + + 
tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water - - lwe_thickness_of_stratiform_precipitation_amount + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - lwe_stratiform_precipitation_rate + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - water_evaporation_amount_from_canopy + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - water_evaporation_flux_from_canopy + + stratiform_cloud_area_fraction - - precipitation_flux_onto_canopy + + surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - outgoing_water_volume_transport_along_river_channel + + surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water - - tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice + + surface_upwelling_radiance_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission + + surface_upwelling_longwave_flux_in_air - - mass_fraction_of_mercury_dry_aerosol_particles_in_air + + incoming_water_volume_transport_along_river_channel - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition + + sea_water_potential_temperature_expressed_as_heat_content - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition + + sea_water_potential_temperature_expressed_as_heat_content - - stratiform_cloud_area_fraction + + sea_ice_temperature_expressed_as_heat_content - - magnitude_of_sea_ice_displacement + + sea_ice_temperature_expressed_as_heat_content @@ -32338,472 +32958,380 @@ surface_upwelling_radiance_per_unit_wavelength_in_sea_water - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles + + platform_name - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles + + water_vapor_partial_pressure_in_air - - soil_mass_content_of_carbon + + effective_radius_of_stratiform_cloud_snow_particles - - slow_soil_pool_mass_content_of_carbon + + tendency_of_atmosphere_moles_of_cfc11 - - root_mass_content_of_carbon + + moles_of_cfc11_per_unit_mass_in_sea_water - - miscellaneous_living_matter_mass_content_of_carbon + + atmosphere_moles_of_cfc11 - - fast_soil_pool_mass_content_of_carbon + + tendency_of_atmosphere_moles_of_cfc113 - - medium_soil_pool_mass_content_of_carbon + + atmosphere_moles_of_cfc113 - - leaf_mass_content_of_carbon + + tendency_of_atmosphere_moles_of_cfc114 - - carbon_mass_content_of_forestry_and_agricultural_products + + atmosphere_moles_of_cfc114 - - carbon_mass_content_of_forestry_and_agricultural_products + + tendency_of_atmosphere_moles_of_cfc115 - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance + + atmosphere_moles_of_cfc115 - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth + + tendency_of_atmosphere_moles_of_cfc12 - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration + + tendency_of_atmosphere_moles_of_halon2402 - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil + + atmosphere_moles_of_halon2402 - - 
surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration + + tendency_of_atmosphere_moles_of_hcc140a - - northward_transformed_eulerian_mean_air_velocity + + atmosphere_moles_of_hcc140a - - eastward_transformed_eulerian_mean_air_velocity - - - - surface_litter_mass_content_of_carbon - - - - litter_mass_content_of_carbon - - - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition - - - - mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms - - - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton - - - - mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water - - - - tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes - - - - tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes - - - - tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction - - - - volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles - - - - water_vapor_partial_pressure_in_air - - - - platform_name - - - - platform_id - - - - mass_flux_of_carbon_into_litter_from_vegetation - - - - subsurface_litter_mass_content_of_carbon - - - - stem_mass_content_of_carbon - - - - mole_concentration_of_dissolved_inorganic_14C_in_sea_water - - - - surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon - - - - surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C + + tendency_of_troposphere_moles_of_hcc140a - - mole_concentration_of_dissolved_inorganic_13C_in_sea_water + + tendency_of_middle_atmosphere_moles_of_hcc140a - - surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water + + tendency_of_troposphere_moles_of_hcfc22 - - surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water + + tendency_of_atmosphere_moles_of_hcfc22 - - surface_upwelling_radiance_per_unit_wavelength_in_air + + atmosphere_moles_of_hcfc22 - - surface_upwelling_longwave_flux_in_air + + product_of_northward_wind_and_lagrangian_tendency_of_air_pressure - - incoming_water_volume_transport_along_river_channel + + product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure - - sea_water_potential_temperature_expressed_as_heat_content + + carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change - - sea_water_potential_temperature_expressed_as_heat_content + + floating_ice_shelf_area_fraction - - sea_ice_temperature_expressed_as_heat_content + + atmosphere_moles_of_carbon_tetrachloride - - sea_ice_temperature_expressed_as_heat_content + + mole_fraction_of_methylglyoxal_in_air - - water_evapotranspiration_flux + + mole_fraction_of_dichlorine_peroxide_in_air - - surface_water_evaporation_flux + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - water_volume_transport_into_sea_water_from_rivers + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles - - stratiform_graupel_flux + + soil_mass_content_of_carbon - - 
wood_debris_mass_content_of_carbon + + slow_soil_pool_mass_content_of_carbon - - toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol + + root_mass_content_of_carbon - - water_flux_into_sea_water_from_rivers + + miscellaneous_living_matter_mass_content_of_carbon - - integral_wrt_height_of_product_of_northward_wind_and_specific_humidity + + fast_soil_pool_mass_content_of_carbon - - integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity + + medium_soil_pool_mass_content_of_carbon - - integral_wrt_depth_of_sea_water_temperature + + leaf_mass_content_of_carbon - - integral_wrt_depth_of_sea_water_temperature + + carbon_mass_content_of_forestry_and_agricultural_products - - integral_wrt_depth_of_sea_water_temperature + + carbon_mass_content_of_forestry_and_agricultural_products - - integral_wrt_depth_of_sea_water_temperature + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance - - integral_wrt_depth_of_sea_water_practical_salinity + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth - - northward_ocean_heat_transport_due_to_parameterized_eddy_advection + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil - - ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + eastward_transformed_eulerian_mean_air_velocity - - upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + surface_litter_mass_content_of_carbon - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + litter_mass_content_of_carbon - - sea_water_x_velocity_due_to_parameterized_mesoscale_eddies + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_mass_content_of_convective_cloud_liquid_water - - tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection + + effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top - - tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + air_equivalent_temperature - - ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + air_pseudo_equivalent_temperature - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + mass_content_of_cloud_liquid_water_in_atmosphere_layer - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + air_equivalent_potential_temperature - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + 
effective_radius_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + effective_radius_of_stratiform_cloud_liquid_water_particles - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + effective_radius_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - integral_wrt_time_of_toa_outgoing_longwave_flux + + effective_radius_of_convective_cloud_liquid_water_particles - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + effective_radius_of_cloud_liquid_water_particles - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + atmosphere_mass_content_of_cloud_liquid_water - - integral_wrt_time_of_surface_net_downward_longwave_flux + + atmosphere_moles_of_cfc12 - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + tendency_of_atmosphere_moles_of_halon1202 - - integral_wrt_time_of_surface_downward_latent_heat_flux + + atmosphere_moles_of_halon1202 - - integral_wrt_time_of_air_temperature_excess + + tendency_of_atmosphere_moles_of_halon1211 - - integral_wrt_time_of_air_temperature_deficit + + atmosphere_moles_of_halon1211 - - tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + tendency_of_atmosphere_moles_of_halon1301 - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_moles_of_halon1301 - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + platform_id - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + mass_flux_of_carbon_into_litter_from_vegetation - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + subsurface_litter_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + stem_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + mole_concentration_of_dissolved_inorganic_14C_in_sea_water - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + mole_concentration_of_dissolved_inorganic_13C_in_sea_water - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + northward_transformed_eulerian_mean_air_velocity - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + surface_water_evaporation_flux - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + water_volume_transport_into_sea_water_from_rivers - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + 
stratiform_graupel_flux - - mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + wood_debris_mass_content_of_carbon - - atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + water_flux_into_sea_water_from_rivers - - lagrangian_tendency_of_air_pressure + + integral_wrt_height_of_product_of_northward_wind_and_specific_humidity - - lagrangian_tendency_of_air_pressure + + integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - air_pressure_at_mean_sea_level + + integral_wrt_depth_of_sea_water_temperature - - sea_floor_depth_below_geoid + + integral_wrt_depth_of_sea_water_temperature - - sea_surface_height_above_geoid + + integral_wrt_depth_of_sea_water_temperature - - sea_surface_height_above_geoid + + integral_wrt_depth_of_sea_water_temperature - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + integral_wrt_depth_of_sea_water_practical_salinity - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + magnitude_of_sea_ice_displacement @@ -33002,68 +33530,244 @@ ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - sea_surface_swell_wave_mean_period + + northward_ocean_heat_transport_due_to_parameterized_eddy_advection - - sea_surface_wind_wave_mean_period + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection - - sea_surface_wave_mean_period + + ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - sea_surface_wind_wave_to_direction + + ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - sea_surface_swell_wave_to_direction + + upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - mass_content_of_water_in_soil + + sea_water_y_velocity_due_to_parameterized_mesoscale_eddies - - mass_content_of_water_in_soil_layer + + sea_water_x_velocity_due_to_parameterized_mesoscale_eddies - - sea_surface_wave_significant_height + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - sea_surface_wind_wave_significant_height + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - sea_surface_swell_wave_significant_height + + tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles + + tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + ocean_heat_y_transport_due_to_parameterized_eddy_advection + + + + 
ocean_heat_x_transport_due_to_parameterized_eddy_advection + + + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + + + northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + + + integral_wrt_time_of_toa_outgoing_longwave_flux + + + + integral_wrt_time_of_toa_net_downward_shortwave_flux + + + + integral_wrt_time_of_surface_net_downward_shortwave_flux + + + + integral_wrt_time_of_surface_net_downward_longwave_flux + + + + tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + + + mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + + + atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + + + mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + + + lagrangian_tendency_of_air_pressure + + + + lagrangian_tendency_of_air_pressure + + + + air_pressure_at_mean_sea_level + + + + sea_floor_depth_below_geoid + + + + sea_surface_height_above_geoid + + + + sea_surface_height_above_geoid + + + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + + + sea_surface_swell_wave_mean_period + + + + sea_surface_wind_wave_mean_period + + + + sea_surface_wave_mean_period + + + + sea_surface_wind_wave_to_direction + + + + atmosphere_moles_of_carbon_monoxide + + + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection + + + + tendency_of_atmosphere_moles_of_nitrous_oxide + + + + tendency_of_atmosphere_moles_of_molecular_hydrogen + + + + tendency_of_atmosphere_moles_of_methyl_chloride + + + + tendency_of_atmosphere_moles_of_methyl_bromide + + + + y_wind + + + + x_wind @@ -33410,6 +34114,118 @@ atmosphere_convective_available_potential_energy + + integral_wrt_time_of_surface_downward_sensible_heat_flux + + + + 
integral_wrt_time_of_surface_downward_latent_heat_flux + + + + integral_wrt_time_of_air_temperature_excess + + + + integral_wrt_time_of_air_temperature_deficit + + + + sea_water_y_velocity + + + + sea_water_x_velocity + + + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + + + mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water + + + + mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water + + + + atmosphere_moles_of_nitrous_oxide + + + + atmosphere_moles_of_molecular_hydrogen + + + + atmosphere_moles_of_methyl_chloride + + + + atmosphere_moles_of_methyl_bromide + + + + atmosphere_moles_of_methane + + + + equivalent_thickness_at_stp_of_atmosphere_ozone_content + + + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + + + sea_surface_swell_wave_to_direction + + + + mass_content_of_water_in_soil + + + + mass_content_of_water_in_soil_layer + + + + sea_surface_wave_significant_height + + + + sea_surface_wind_wave_significant_height + + + + sea_surface_swell_wave_significant_height + + + + tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles + + mass_concentration_of_chlorophyll_in_sea_water @@ -33438,14 +34254,6 @@ land_ice_surface_specific_mass_balance_rate - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - - - equivalent_thickness_at_stp_of_atmosphere_ozone_content - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion @@ -33502,22 +34310,6 @@ tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - - - atmosphere_net_upward_convective_mass_flux - - tendency_of_troposphere_moles_of_molecular_hydrogen @@ -33558,78 +34350,6 @@ tendency_of_middle_atmosphere_moles_of_carbon_monoxide - - tendency_of_atmosphere_moles_of_nitrous_oxide - - - - tendency_of_atmosphere_moles_of_molecular_hydrogen - - - - tendency_of_atmosphere_moles_of_methyl_chloride - - - - tendency_of_atmosphere_moles_of_methyl_bromide - - - - y_wind - - - - x_wind - - - - sea_water_y_velocity - - - - sea_water_x_velocity - - - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water - - - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water - - - - mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water - - - - mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water - - - - atmosphere_moles_of_nitrous_oxide - - - - atmosphere_moles_of_molecular_hydrogen - - - - atmosphere_moles_of_methyl_chloride - - - - atmosphere_moles_of_methyl_bromide - - 
- - atmosphere_moles_of_methane - - - - atmosphere_moles_of_carbon_monoxide - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection @@ -33770,46 +34490,6 @@ eastward_water_vapor_flux_in_air - - surface_upward_sensible_heat_flux - - - - surface_temperature - - - - surface_temperature - - - - surface_temperature - - - - surface_net_downward_radiative_flux - - - - mole_fraction_of_hypochlorous_acid_in_air - - - - mole_fraction_of_chlorine_monoxide_in_air - - - - mole_fraction_of_chlorine_dioxide_in_air - - - - wind_mixing_energy_flux_into_sea_water - - - - water_flux_into_sea_water - - upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves @@ -33838,6 +34518,30 @@ wave_frequency + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence + + + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection + + + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection + + + + atmosphere_net_upward_convective_mass_flux + + + + mass_fraction_of_ozone_in_air + + + + mass_fraction_of_convective_cloud_condensed_water_in_air + + sea_surface_wind_wave_period @@ -33850,6 +34554,46 @@ mass_concentration_of_suspended_matter_in_sea_water + + surface_upward_sensible_heat_flux + + + + surface_temperature + + + + surface_temperature + + + + surface_temperature + + + + surface_net_downward_radiative_flux + + + + mole_fraction_of_hypochlorous_acid_in_air + + + + mole_fraction_of_chlorine_monoxide_in_air + + + + mole_fraction_of_chlorine_dioxide_in_air + + + + wind_mixing_energy_flux_into_sea_water + + + + water_flux_into_sea_water + + surface_drag_coefficient_in_air @@ -33878,6 +34622,10 @@ mole_fraction_of_ozone_in_air + + water_evapotranspiration_flux + + isotropic_shortwave_radiance_in_air @@ -33885,14 +34633,6 @@ isotropic_longwave_radiance_in_air - - - mass_fraction_of_ozone_in_air - - - - mass_fraction_of_convective_cloud_condensed_water_in_air - diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 38465472ee..0e6670533f 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -89,12 +89,12 @@ def callback(cube, field, filename): """ +from collections.abc import Iterable import contextlib import glob import importlib import itertools import os.path -import pathlib import threading import iris._constraints @@ -256,7 +256,8 @@ def context(self, **kwargs): def _generate_cubes(uris, callback, constraints): """Returns a generator of cubes given the URIs and a callback.""" - if isinstance(uris, (str, pathlib.PurePath)): + if isinstance(uris, str) or not isinstance(uris, Iterable): + # Make a string, or other single item, into an iterable. uris = [uris] # Group collections of uris by their iris handler @@ -273,6 +274,10 @@ def _generate_cubes(uris, callback, constraints): urls = [":".join(x) for x in groups] for cube in iris.io.load_http(urls, callback): yield cube + elif scheme == "data": + data_objects = [x[1] for x in groups] + for cube in iris.io.load_data_objects(data_objects, callback): + yield cube else: raise ValueError("Iris cannot handle the URI scheme: %s" % scheme) diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 5debc452ee..01a1bb689b 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -22,7 +22,7 @@ # # * Cope with auxiliary coordinate factories. # -# * Allow concatentation over a user specified dimension. +# * Allow concatenation over a user specified dimension. 
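# A minimal, self-contained sketch (not part of the patch; helper name is
# illustrative) of the broadened input handling shown above in
# lib/iris/__init__.py: a plain string, or any other single non-iterable item
# such as an open dataset object, is wrapped into a one-element list before
# the URIs/data objects are grouped and dispatched to the appropriate loader.
from collections.abc import Iterable

def _as_load_targets(uris):
    # Strings are iterable, but must still be treated as a single URI.
    if isinstance(uris, str) or not isinstance(uris, Iterable):
        uris = [uris]
    return list(uris)

print(_as_load_targets("air_temperature.nc"))   # ['air_temperature.nc']
print(_as_load_targets(["a.nc", "b.nc"]))       # ['a.nc', 'b.nc']
print(_as_load_targets(42))                     # [42] - any single object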
# @@ -160,6 +160,39 @@ def name(self): return self.defn.name() +class _DerivedCoordAndDims( + namedtuple("DerivedCoordAndDims", ["coord", "dims", "aux_factory"]) +): + """ + Container for a derived coordinate, the associated AuxCoordFactory, and the + associated data dimension(s) spanned over a :class:`iris.cube.Cube`. + + Args: + + * coord: + A :class:`iris.coords.DimCoord` or :class:`iris.coords.AuxCoord` + coordinate instance. + + * dims: + A tuple of the data dimension(s) spanned by the coordinate. + + * aux_factory: + A :class:`iris.aux_factory.AuxCoordFactory` instance. + + """ + + __slots__ = () + + def __eq__(self, other): + """Do not take aux factories into account for equality.""" + result = NotImplemented + if isinstance(other, _DerivedCoordAndDims): + equal_coords = self.coord == other.coord + equal_dims = self.dims == other.dims + result = equal_coords and equal_dims + return result + + class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])): """ Container for the metadata that defines a cell measure or ancillary @@ -280,6 +313,7 @@ def concatenate( check_aux_coords=True, check_cell_measures=True, check_ancils=True, + check_derived_coords=True, ): """ Concatenate the provided cubes over common existing dimensions. @@ -296,6 +330,30 @@ def concatenate( If True, raise an informative :class:`~iris.exceptions.ContatenateError` if registration fails. + * check_aux_coords + Checks if the points and bounds of auxiliary coordinates of the cubes + match. This check is not applied to auxiliary coordinates that span the + dimension the concatenation is occurring along. Defaults to True. + + * check_cell_measures + Checks if the data of cell measures of the cubes match. This check is + not applied to cell measures that span the dimension the concatenation + is occurring along. Defaults to True. + + * check_ancils + Checks if the data of ancillary variables of the cubes match. This + check is not applied to ancillary variables that span the dimension the + concatenation is occurring along. Defaults to True. + + * check_derived_coords + Checks if the points and bounds of derived coordinates of the cubes + match. This check is not applied to derived coordinates that span the + dimension the concatenation is occurring along. Note that differences + in scalar coordinates and dimensional coordinates used to derive the + coordinate are still checked. Checks for auxiliary coordinates used to + derive the coordinates can be ignored with `check_aux_coords`. Defaults + to True. + Returns: A :class:`iris.cube.CubeList` of concatenated :class:`iris.cube.Cube` instances. @@ -321,6 +379,7 @@ def concatenate( check_aux_coords, check_cell_measures, check_ancils, + check_derived_coords, ) if registered: axis = proto_cube.axis @@ -378,6 +437,8 @@ def __init__(self, cube): self.cm_metadata = [] self.ancillary_variables_and_dims = [] self.av_metadata = [] + self.derived_coords_and_dims = [] + self.derived_metadata = [] self.dim_mapping = [] # Determine whether there are any anonymous cube dimensions. 
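# Hypothetical usage sketch for the concatenate() keywords documented above.
# The cubes built here are deliberately tiny and carry no aux factories, so
# check_derived_coords has nothing extra to compare; the call simply
# demonstrates the new signature of the (private) entry point touched by
# this patch.
import numpy as np
from iris._concatenate import concatenate
from iris.coords import DimCoord
from iris.cube import Cube

def _cube(t0):
    time = DimCoord(
        np.arange(t0, t0 + 2, dtype=float),
        standard_name="time",
        units="days since 2000-01-01",
    )
    return Cube(
        np.zeros(2),
        standard_name="air_temperature",
        units="K",
        dim_coords_and_dims=[(time, 0)],
    )

(result,) = concatenate([_cube(0.0), _cube(2.0)], check_derived_coords=True)
print(result.shape)   # (4,)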
@@ -437,6 +498,17 @@ def meta_key_func(dm): av_and_dims = _CoordAndDims(av, tuple(dims)) self.ancillary_variables_and_dims.append(av_and_dims) + def name_key_func(factory): + return factory.name() + + for factory in sorted(cube.aux_factories, key=name_key_func): + coord = factory.make_coord(cube.coord_dims) + dims = cube.coord_dims(coord) + metadata = _CoordMetaData(coord, dims) + self.derived_metadata.append(metadata) + coord_and_dims = _DerivedCoordAndDims(coord, tuple(dims), factory) + self.derived_coords_and_dims.append(coord_and_dims) + def _coordinate_differences(self, other, attr, reason="metadata"): """ Determine the names of the coordinates that differ between `self` and @@ -544,6 +616,14 @@ def match(self, other, error_on_mismatch): msgs.append( msg_template.format("Ancillary variables", *differences) ) + # Check derived coordinates. + if self.derived_metadata != other.derived_metadata: + differences = self._coordinate_differences( + other, "derived_metadata" + ) + msgs.append( + msg_template.format("Derived coordinates", *differences) + ) # Check scalar coordinates. if self.scalar_coords != other.scalar_coords: differences = self._coordinate_differences( @@ -597,6 +677,7 @@ def __init__(self, cube_signature): self.ancillary_variables_and_dims = ( cube_signature.ancillary_variables_and_dims ) + self.derived_coords_and_dims = cube_signature.derived_coords_and_dims self.dim_coords = cube_signature.dim_coords self.dim_mapping = cube_signature.dim_mapping self.dim_extents = [] @@ -779,6 +860,11 @@ def concatenate(self): # Concatenate the new ancillary variables ancillary_variables_and_dims = self._build_ancillary_variables() + # Concatenate the new aux factories + aux_factories = self._build_aux_factories( + dim_coords_and_dims, aux_coords_and_dims + ) + # Concatenate the new data payload. data = self._build_data() @@ -790,6 +876,7 @@ def concatenate(self): aux_coords_and_dims=aux_coords_and_dims, cell_measures_and_dims=cell_measures_and_dims, ancillary_variables_and_dims=ancillary_variables_and_dims, + aux_factories=aux_factories, **kwargs, ) else: @@ -807,6 +894,7 @@ def register( check_aux_coords=False, check_cell_measures=False, check_ancils=False, + check_derived_coords=False, ): """ Determine whether the given source-cube is suitable for concatenation @@ -827,6 +915,31 @@ def register( * error_on_mismatch: If True, raise an informative error if registration fails. + * check_aux_coords + Checks if the points and bounds of auxiliary coordinates of the + cubes match. This check is not applied to auxiliary coordinates + that span the dimension the concatenation is occurring along. + Defaults to False. + + * check_cell_measures + Checks if the data of cell measures of the cubes match. This check + is not applied to cell measures that span the dimension the + concatenation is occurring along. Defaults to False. + + * check_ancils + Checks if the data of ancillary variables of the cubes match. This + check is not applied to ancillary variables that span the dimension + the concatenation is occurring along. Defaults to False. + + * check_derived_coords + Checks if the points and bounds of derived coordinates of the cubes + match. This check is not applied to derived coordinates that span + the dimension the concatenation is occurring along. Note that + differences in scalar coordinates and dimensional coordinates used + to derive the coordinate are still checked. Checks for auxiliary + coordinates used to derive the coordinates can be ignored with + `check_aux_coords`. 
Defaults to False. + Returns: Boolean. @@ -905,6 +1018,21 @@ def register( if not coord_a == coord_b: match = False + # Check for compatible derived coordinates. + if match: + if check_derived_coords: + for coord_a, coord_b in zip( + self._cube_signature.derived_coords_and_dims, + cube_signature.derived_coords_and_dims, + ): + # Derived coords that span the candidate axis can differ + if ( + candidate_axis not in coord_a.dims + or candidate_axis not in coord_b.dims + ): + if not coord_a == coord_b: + match = False + if match: # Register the cube as a source-cube for this proto-cube. self._add_skeleton(coord_signature, cube.lazy_data()) @@ -1088,6 +1216,64 @@ def _build_ancillary_variables(self): return ancillary_variables_and_dims + def _build_aux_factories(self, dim_coords_and_dims, aux_coords_and_dims): + """ + Generate the aux factories for the new concatenated cube. + + Args: + + * dim_coords_and_dims: + A list of dimension coordinate and dimension tuple pairs from the + concatenated cube. + + * aux_coords_and_dims: + A list of auxiliary coordinates and dimension(s) tuple pairs from + the concatenated cube. + + Returns: + A list of :class:`iris.aux_factory.AuxCoordFactory`. + + """ + # Setup convenience hooks. + cube_signature = self._cube_signature + old_dim_coords = cube_signature.dim_coords + old_aux_coords = [a[0] for a in cube_signature.aux_coords_and_dims] + new_dim_coords = [d[0] for d in dim_coords_and_dims] + new_aux_coords = [a[0] for a in aux_coords_and_dims] + scalar_coords = cube_signature.scalar_coords + + aux_factories = [] + + # Generate all the factories for the new concatenated cube. + for i, (coord, dims, factory) in enumerate( + cube_signature.derived_coords_and_dims + ): + # Check whether the derived coordinate of the factory spans the + # nominated dimension of concatenation. + if self.axis in dims: + # Update the dependencies of the factory with coordinates of + # the concatenated cube. We need to check all coordinate types + # here (dim coords, aux coords, and scalar coords). + new_dependencies = {} + for old_dependency in factory.dependencies.values(): + if old_dependency in old_dim_coords: + dep_idx = old_dim_coords.index(old_dependency) + new_dependency = new_dim_coords[dep_idx] + elif old_dependency in old_aux_coords: + dep_idx = old_aux_coords.index(old_dependency) + new_dependency = new_aux_coords[dep_idx] + else: + dep_idx = scalar_coords.index(old_dependency) + new_dependency = scalar_coords[dep_idx] + new_dependencies[id(old_dependency)] = new_dependency + + # Create new factory with the updated dependencies. + factory = factory.updated(new_dependencies) + + aux_factories.append(factory) + + return aux_factories + def _build_data(self): """ Generate the data payload for the new concatenated cube. diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index e0566fc8f2..4c294a7d2f 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -47,6 +47,15 @@ def is_lazy_data(data): return result +def is_lazy_masked_data(data): + """ + Return True if the argument is both an Iris 'lazy' data array and the + underlying array is of masked type. Otherwise return False. 
+ + """ + return is_lazy_data(data) and ma.isMA(da.utils.meta_from_array(data)) + + @lru_cache def _optimum_chunksize_internals( chunks, diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 5ca5f31a8e..0f748d6d34 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -298,7 +298,7 @@ class _CoordSignature( ): """ Criterion for identifying a specific type of :class:`iris.cube.Cube` - based on its scalar and vector coorinate data and metadata, and + based on its scalar and vector coordinate data and metadata, and auxiliary coordinate factories. Args: @@ -516,7 +516,7 @@ class _Relation(namedtuple("Relation", ["separable", "inseparable"])): * separable: A set of independent candidate dimension names. - * inseperable: + * inseparable: A set of dependent candidate dimension names. """ @@ -1419,7 +1419,7 @@ def _define_space(self, space, positions, indexes, function_matrix): """ - # Heuristic reordering of coordinate defintion indexes into + # Heuristic reordering of coordinate definition indexes into # preferred dimension order. def axis_and_name(name): axis_dict = {"T": 1, "Z": 2, "Y": 3, "X": 4} @@ -1467,7 +1467,7 @@ def axis_and_name(name): } else: # TODO: Consider appropriate sort order (ascending, - # decending) i.e. use CF positive attribute. + # descending) i.e. use CF positive attribute. cells = sorted(indexes[name]) points = np.array( [cell.point for cell in cells], diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 6b0d4cf0f3..4e0fcfb1ea 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -264,13 +264,11 @@ def __init__(self, title, cell_methods): self.names = [] self.values = [] self.contents = [] - for method in cell_methods: - name = method.method - # Remove "method: " from the front of the string, leaving the value. - value = str(method)[len(name + ": ") :] - self.names.append(name) + for index, method in enumerate(cell_methods): + value = str(method) + self.names.append(str(index)) self.values.append(value) - content = "{}: {}".format(name, value) + content = "{}: {}".format(index, value) self.contents.append(content) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index f34cda1402..4cd9ccbe05 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -35,12 +35,18 @@ """ -from collections import OrderedDict +from __future__ import annotations + from collections.abc import Iterable import functools from functools import wraps +from inspect import getfullargspec +import itertools +from numbers import Number +from typing import Optional, Union import warnings +from cf_units import Unit import dask.array as da import numpy as np import numpy.ma as ma @@ -55,7 +61,9 @@ ) from iris.analysis._regrid import CurvilinearRegridder, RectilinearRegridder import iris.coords +from iris.coords import _DimensionalMetadata from iris.exceptions import LazyAggregatorError +import iris.util __all__ = ( "Aggregator", @@ -467,11 +475,13 @@ def __init__( Kwargs: * units_func (callable): - | *Call signature*: (units) + | *Call signature*: (units, \**kwargs) If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a value that can be made into one. + To ensure backwards-compatibility, also accepts a callable with + call signature (units). 
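# A small sketch of the dual-signature units_func support documented above:
# old-style callables taking only `units` keep working, while new-style ones
# (accepting **kwargs) also receive the aggregation keywords such as weights.
# Plain strings stand in for cf_units.Unit objects here.
from inspect import getfullargspec

def call_units_func(units_func, units, **kwargs):
    if getfullargspec(units_func).varkw is None:
        return units_func(units)          # old style: (units)
    return units_func(units, **kwargs)    # new style: (units, **kwargs)

count_units = lambda units, **kwargs: "1"   # e.g. COUNT: always dimensionless
legacy_units = lambda units: units          # old single-argument form still works

print(call_units_func(count_units, "K", weights=None))   # '1'
print(call_units_func(legacy_units, "K"))                 # 'K'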
* lazy_func (callable or None): An alternative to :data:`call_func` implementing a lazy @@ -479,7 +489,8 @@ def __init__( main operation, but should raise an error in unhandled cases. Additional kwargs:: - Passed through to :data:`call_func` and :data:`lazy_func`. + Passed through to :data:`call_func`, :data:`lazy_func`, and + :data:`units_func`. Aggregators are used by cube aggregation methods such as :meth:`~iris.cube.Cube.collapsed` and @@ -625,7 +636,11 @@ def update_metadata(self, cube, coords, **kwargs): """ # Update the units if required. if self.units_func is not None: - cube.units = self.units_func(cube.units) + argspec = getfullargspec(self.units_func) + if argspec.varkw is None: # old style + cube.units = self.units_func(cube.units) + else: # new style (preferred) + cube.units = self.units_func(cube.units, **kwargs) def post_process(self, collapsed_cube, data_result, coords, **kwargs): """ @@ -693,13 +708,13 @@ class PercentileAggregator(_Aggregator): """ def __init__(self, units_func=None, **kwargs): - """ + r""" Create a percentile aggregator. Kwargs: * units_func (callable): - | *Call signature*: (units) + | *Call signature*: (units, \**kwargs) If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a @@ -934,13 +949,13 @@ class WeightedPercentileAggregator(PercentileAggregator): """ def __init__(self, units_func=None, lazy_func=None, **kwargs): - """ + r""" Create a weighted percentile aggregator. Kwargs: * units_func (callable): - | *Call signature*: (units) + | *Call signature*: (units, \**kwargs) If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a @@ -1172,8 +1187,112 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): return result +class _Weights(np.ndarray): + """Class for handling weights for weighted aggregation. + + This subclasses :class:`numpy.ndarray`; thus, all methods and properties of + :class:`numpy.ndarray` (e.g., `shape`, `ndim`, `view()`, etc.) are + available. + + Details on subclassing :class:`numpy.ndarray` are given here: + https://numpy.org/doc/stable/user/basics.subclassing.html + + """ + + def __new__(cls, weights, cube, units=None): + """Create class instance. + + Args: + + * weights (Cube, string, _DimensionalMetadata, array-like): + If given as a :class:`iris.cube.Cube`, use its data and units. If + given as a :obj:`str` or :class:`iris.coords._DimensionalMetadata`, + assume this is (the name of) a + :class:`iris.coords._DimensionalMetadata` object of the cube (i.e., + one of :meth:`iris.cube.Cube.coords`, + :meth:`iris.cube.Cube.cell_measures`, or + :meth:`iris.cube.Cube.ancillary_variables`). If given as an + array-like object, use this directly and assume units of `1`. If + `units` is given, ignore all units derived above and use the ones + given by `units`. + * cube (Cube): + Input cube for aggregation. If weights is given as :obj:`str` or + :class:`iris.coords._DimensionalMetadata`, try to extract the + :class:`iris.coords._DimensionalMetadata` object and corresponding + dimensional mappings from this cube. Otherwise, this argument is + ignored. + * units (string, Unit): + If ``None``, use units derived from `weights`. Otherwise, overwrite + the units derived from `weights` and use `units`. 
+ + """ + # `weights` is a cube + # Note: to avoid circular imports of Cube we use duck typing using the + # "hasattr" syntax here + # --> Extract data and units from cube + if hasattr(weights, "add_aux_coord"): + obj = np.asarray(weights.data).view(cls) + obj.units = weights.units + + # `weights`` is a string or _DimensionalMetadata object + # --> Extract _DimensionalMetadata object from cube, broadcast it to + # correct shape using the corresponding dimensional mapping, and use + # its data and units + elif isinstance(weights, (str, _DimensionalMetadata)): + dim_metadata = cube._dimensional_metadata(weights) + arr = dim_metadata._values + if dim_metadata.shape != cube.shape: + arr = iris.util.broadcast_to_shape( + arr, + cube.shape, + dim_metadata.cube_dims(cube), + ) + obj = np.asarray(arr).view(cls) + obj.units = dim_metadata.units + + # Remaining types (e.g., np.ndarray): try to convert to ndarray. + else: + obj = np.asarray(weights).view(cls) + obj.units = Unit("1") + + # Overwrite units from units argument if necessary + if units is not None: + obj.units = units + + return obj + + def __array_finalize__(self, obj): + """See https://numpy.org/doc/stable/user/basics.subclassing.html. + + Note + ---- + `obj` cannot be `None` here since ``_Weights.__new__`` does not call + ``super().__new__`` explicitly. + + """ + self.units = getattr(obj, "units", Unit("1")) + + @classmethod + def update_kwargs(cls, kwargs, cube): + """Update ``weights`` keyword argument in-place. + + Args: + + * kwargs (dict): + Keyword arguments that will be updated in-place if a `weights` + keyword is present which is not ``None``. + * cube (Cube): + Input cube for aggregation. If weights is given as :obj:`str`, try + to extract a cell measure with the corresponding name from this + cube. Otherwise, this argument is ignored. + + """ + if kwargs.get("weights") is not None: + kwargs["weights"] = cls(kwargs["weights"], cube) + + def create_weighted_aggregator_fn(aggregator_fn, axis, **kwargs): - """Return an aggregator function that can explicitely handle weights. + """Return an aggregator function that can explicitly handle weights. Args: @@ -1398,7 +1517,7 @@ def _weighted_quantile_1D(data, weights, quantiles, **kwargs): array or float. Calculated quantile values (set to np.nan wherever sum of weights is zero or masked) """ - # Return np.nan if no useable points found + # Return np.nan if no usable points found if np.isclose(weights.sum(), 0.0) or ma.is_masked(weights.sum()): return np.resize(np.array(np.nan), len(quantiles)) # Sort the data @@ -1535,7 +1654,7 @@ def _proportion(array, function, axis, **kwargs): # Otherwise, it is possible for numpy to return a masked array that has # a dtype for its data that is different to the dtype of the fill-value, # which can cause issues outside this function. - # Reference - tests/unit/analyis/test_PROPORTION.py Test_masked.test_ma + # Reference - tests/unit/analysis/test_PROPORTION.py Test_masked.test_ma numerator = _count(array, axis=axis, function=function, **kwargs) result = ma.asarray(numerator / total_non_masked) @@ -1583,27 +1702,19 @@ def _lazy_max_run(array, axis=-1, **kwargs): def _rms(array, axis, **kwargs): - # XXX due to the current limitations in `da.average` (see below), maintain - # an explicit non-lazy aggregation function for now. - # Note: retaining this function also means that if weights are passed to - # the lazy aggregator, the aggregation will fall back to using this - # non-lazy aggregator. 
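# A stripped-down, illustrative version of the numpy.ndarray-subclass pattern
# used by _Weights above: a units attribute travels with the array and is
# preserved through views and slicing via __array_finalize__.
import numpy as np

class UnitArray(np.ndarray):
    def __new__(cls, data, units="1"):
        obj = np.asarray(data).view(cls)
        obj.units = units
        return obj

    def __array_finalize__(self, obj):
        # Called for views/slices as well as explicit construction.
        self.units = getattr(obj, "units", "1")

w = UnitArray([1.0, 2.0, 3.0], units="m2")
print(w.units)       # 'm2'
print(w[:2].units)   # 'm2' - views keep the attribute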
- rval = np.sqrt(ma.average(np.square(array), axis=axis, **kwargs)) - if not ma.isMaskedArray(array): - rval = np.asarray(rval) + rval = np.sqrt(ma.average(array**2, axis=axis, **kwargs)) + return rval -@_build_dask_mdtol_function def _lazy_rms(array, axis, **kwargs): - # XXX This should use `da.average` and not `da.mean`, as does the above. - # However `da.average` current doesn't handle masked weights correctly - # (see https://github.com/dask/dask/issues/3846). - # To work around this we use da.mean, which doesn't support weights at - # all. Thus trying to use this aggregator with weights will currently - # raise an error in dask due to the unexpected keyword `weights`, - # rather than silently returning the wrong answer. - return da.sqrt(da.mean(array**2, axis=axis, **kwargs)) + # Note that, since we specifically need the ma version of average to handle + # weights correctly with masked data, we cannot rely on NEP13/18 and need + # to implement a separate lazy RMS function. + + rval = da.sqrt(da.ma.average(array**2, axis=axis, **kwargs)) + + return rval def _sum(array, **kwargs): @@ -1638,6 +1749,18 @@ def _sum(array, **kwargs): return rvalue +def _sum_units_func(units, **kwargs): + """Multiply original units with weight units if possible.""" + weights = kwargs.get("weights") + if weights is None: # no weights given or weights are None + result = units + elif hasattr(weights, "units"): # weights are _Weights + result = units * weights.units + else: # weights are regular np.ndarrays + result = units + return result + + def _peak(array, **kwargs): def column_segments(column): nan_indices = np.where(np.isnan(column))[0] @@ -1753,7 +1876,7 @@ def interp_order(length): COUNT = Aggregator( "count", _count, - units_func=lambda units: 1, + units_func=lambda units, **kwargs: 1, lazy_func=_build_dask_mdtol_function(_count), ) """ @@ -1785,7 +1908,7 @@ def interp_order(length): MAX_RUN = Aggregator( None, iris._lazy_data.non_lazy(_lazy_max_run), - units_func=lambda units: 1, + units_func=lambda units, **kwargs: 1, lazy_func=_build_dask_mdtol_function(_lazy_max_run), ) """ @@ -1913,6 +2036,7 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.MEDIAN) + This aggregator handles masked data, but NOT lazy data. For lazy aggregation, please try :obj:`~.PERCENTILE`. @@ -2029,7 +2153,11 @@ def interp_order(length): """ -PROPORTION = Aggregator("proportion", _proportion, units_func=lambda units: 1) +PROPORTION = Aggregator( + "proportion", + _proportion, + units_func=lambda units, **kwargs: 1, +) """ An :class:`~iris.analysis.Aggregator` instance that calculates the proportion, as a fraction, of :class:`~iris.cube.Cube` data occurrences @@ -2071,14 +2199,16 @@ def interp_order(length): the root mean square over a :class:`~iris.cube.Cube`, as computed by ((x0**2 + x1**2 + ... + xN-1**2) / N) ** 0.5. -Additional kwargs associated with the use of this aggregator: +Parameters +---------- -* weights (float ndarray): +weights : array-like, optional Weights matching the shape of the cube or the length of the window for rolling window operations. The weights are applied to the squares when taking the mean. 
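# Worked example of the weighted root-mean-square computed by the RMS
# aggregator described above: the square root of the weighted mean of the
# squares (here with plain numpy, no cube required).
import numpy as np
import numpy.ma as ma

data = np.array([1.0, 2.0, 3.0, 4.0])
weights = np.array([1.0, 1.0, 2.0, 2.0])

rms = np.sqrt(ma.average(data**2, weights=weights))
# (1*1 + 1*4 + 2*9 + 2*16) / 6 = 55/6, so rms = sqrt(55/6), roughly 3.028
print(rms)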
-**For example**: +Example +------- To compute the zonal root mean square over the *longitude* axis of a cube:: @@ -2128,6 +2258,7 @@ def interp_order(length): SUM = WeightedAggregator( "sum", _sum, + units_func=_sum_units_func, lazy_func=_build_dask_mdtol_function(_sum), ) """ @@ -2165,7 +2296,7 @@ def interp_order(length): VARIANCE = Aggregator( "variance", ma.var, - units_func=lambda units: units * units, + units_func=lambda units, **kwargs: units * units, lazy_func=_build_dask_mdtol_function(da.var), ddof=1, ) @@ -2257,8 +2388,11 @@ class _Groupby: """ def __init__( - self, groupby_coords, shared_coords=None, climatological=False - ): + self, + groupby_coords: list[iris.coords.Coord], + shared_coords: Optional[list[tuple[iris.coords.Coord, int]]] = None, + climatological: bool = False, + ) -> None: """ Determine the group slices over the group-by coordinates. @@ -2282,15 +2416,15 @@ def __init__( """ #: Group-by and shared coordinates that have been grouped. - self.coords = [] - self._groupby_coords = [] - self._shared_coords = [] - self._slices_by_key = OrderedDict() + self.coords: list[iris.coords.Coord] = [] + self._groupby_coords: list[iris.coords.Coord] = [] + self._shared_coords: list[tuple[iris.coords.Coord, int]] = [] + self._groupby_indices: list[tuple[int, ...]] = [] self._stop = None # Ensure group-by coordinates are iterable. if not isinstance(groupby_coords, Iterable): raise TypeError( - "groupby_coords must be a " "`collections.Iterable` type." + "groupby_coords must be a `collections.Iterable` type." ) # Add valid group-by coordinates. @@ -2302,7 +2436,7 @@ def __init__( # Ensure shared coordinates are iterable. if not isinstance(shared_coords, Iterable): raise TypeError( - "shared_coords must be a " "`collections.Iterable` type." + "shared_coords must be a `collections.Iterable` type." ) # Add valid shared coordinates. for coord, dim in shared_coords: @@ -2313,9 +2447,11 @@ def __init__( # Stores mapping from original cube coords to new ones, as metadata may # not match - self.coord_replacement_mapping = [] + self.coord_replacement_mapping: list[ + tuple[iris.coords.Coord, iris.coords.Coord] + ] = [] - def _add_groupby_coord(self, coord): + def _add_groupby_coord(self, coord: iris.coords.Coord) -> None: if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) if self._stop is None: @@ -2324,12 +2460,12 @@ def _add_groupby_coord(self, coord): raise ValueError("Group-by coordinates have different lengths.") self._groupby_coords.append(coord) - def _add_shared_coord(self, coord, dim): + def _add_shared_coord(self, coord: iris.coords.Coord, dim: int) -> None: if coord.shape[dim] != self._stop and self._stop is not None: raise ValueError("Shared coordinates have different lengths.") self._shared_coords.append((coord, dim)) - def group(self): + def group(self) -> list[tuple[int, ...]]: """ Calculate the groups and associated slices over one or more group-by coordinates. @@ -2338,147 +2474,84 @@ def group(self): group slices. Returns: - A generator of the coordinate group slices. - - """ - if self._groupby_coords: - if not self._slices_by_key: - items = [] - groups = [] - - for coord in self._groupby_coords: - groups.append(iris.coords._GroupIterator(coord.points)) - items.append(next(groups[-1])) - - # Construct the group slice for each group over the group-by - # coordinates. Keep constructing until all group-by coordinate - # groups are exhausted. 
- while any([item is not None for item in items]): - # Determine the extent (start, stop) of the group given - # each current group-by coordinate group. - start = max( - [ - item.groupby_slice.start - for item in items - if item is not None - ] - ) - stop = min( - [ - item.groupby_slice.stop - for item in items - if item is not None - ] - ) - # Construct composite group key for the group using the - # start value from each group-by coordinate. - key = tuple( - [coord.points[start] for coord in self._groupby_coords] - ) - # Associate group slice with group key within the ordered - # dictionary. - self._slices_by_key.setdefault(key, []).append( - slice(start, stop) - ) - # Prepare for the next group slice construction over the - # group-by coordinates. - for item_index, item in enumerate(items): - if item is None: - continue - # Get coordinate current group slice. - groupby_slice = item.groupby_slice - # Determine whether coordinate has spanned all its - # groups i.e. its full length - # or whether we need to get the coordinates next group. - if groupby_slice.stop == self._stop: - # This coordinate has exhausted all its groups, - # so remove it. - items[item_index] = None - elif groupby_slice.stop == stop: - # The current group of this coordinate is - # exhausted, so get the next one. - items[item_index] = next(groups[item_index]) - - # Merge multiple slices together into one tuple. - self._slice_merge() - # Calculate the new group-by coordinates. - self._compute_groupby_coords() - # Calculate the new shared coordinates. - self._compute_shared_coords() - # Generate the group-by slices/groups. - for groupby_slice in self._slices_by_key.values(): - yield groupby_slice - - return - - def _slice_merge(self): - """ - Merge multiple slices into one tuple and collapse items from - containing list. - - """ - # Iterate over the ordered dictionary in order to reduce - # multiple slices into a single tuple and collapse - # all items from containing list. - for key, groupby_slices in self._slices_by_key.items(): - if len(groupby_slices) > 1: - # Compress multiple slices into tuple representation. - groupby_indicies = [] - - for groupby_slice in groupby_slices: - groupby_indicies.extend( - range(groupby_slice.start, groupby_slice.stop) - ) - - self._slices_by_key[key] = tuple(groupby_indicies) - else: - # Remove single inner slice from list. - self._slices_by_key[key] = groupby_slices[0] - - def _compute_groupby_coords(self): + A list of the coordinate group slices. + + """ + if not self._groupby_indices: + # Construct the group indices for each group over the group-by + # coordinates. Keep constructing until all group-by coordinate + # groups are exhausted. + + def group_iterator(points): + start = 0 + for _, group in itertools.groupby(points): + stop = sum((1 for _ in group), start) + yield slice(start, stop) + start = stop + + groups = [group_iterator(c.points) for c in self._groupby_coords] + groupby_slices = [next(group) for group in groups] + indices_by_key: dict[ + tuple[Union[Number, str], ...], list[int] + ] = {} + while any(s is not None for s in groupby_slices): + # Determine the extent (start, stop) of the group given + # each current group-by coordinate group. + start = max(s.start for s in groupby_slices if s is not None) + stop = min(s.stop for s in groupby_slices if s is not None) + # Construct composite group key for the group using the + # start value from each group-by coordinate. 
+ key = tuple( + coord.points[start] for coord in self._groupby_coords + ) + # Associate group slice with group key within the ordered + # dictionary. + indices_by_key.setdefault(key, []).extend(range(start, stop)) + # Prepare for the next group slice construction over the + # group-by coordinates. + for index, groupby_slice in enumerate(groupby_slices): + if groupby_slice is None: + continue + # Determine whether coordinate has spanned all its + # groups i.e. its full length + # or whether we need to get the coordinates next group. + if groupby_slice.stop == self._stop: + # This coordinate has exhausted all its groups, + # so remove it. + groupby_slices[index] = None + elif groupby_slice.stop == stop: + # The current group of this coordinate is + # exhausted, so get the next one. + groupby_slices[index] = next(groups[index]) + + # Cache the indices + self._groupby_indices = [tuple(i) for i in indices_by_key.values()] + # Calculate the new group-by coordinates. + self._compute_groupby_coords() + # Calculate the new shared coordinates. + self._compute_shared_coords() + + # Return the group-by indices/groups. + return self._groupby_indices + + def _compute_groupby_coords(self) -> None: """Create new group-by coordinates given the group slices.""" - - groupby_slice = [] - - # Iterate over the ordered dictionary in order to construct - # a group-by slice that samples the first element from each group. - for key_slice in self._slices_by_key.values(): - if isinstance(key_slice, tuple): - groupby_slice.append(key_slice[0]) - else: - groupby_slice.append(key_slice.start) - - groupby_slice = np.array(groupby_slice) + # Construct a group-by slice that samples the first element from each + # group. + groupby_slice = np.array([i[0] for i in self._groupby_indices]) # Create new group-by coordinates from the group-by slice. self.coords = [coord[groupby_slice] for coord in self._groupby_coords] - def _compute_shared_coords(self): + def _compute_shared_coords(self) -> None: """Create the new shared coordinates given the group slices.""" - - groupby_indices = [] - groupby_bounds = [] - - # Iterate over the ordered dictionary in order to construct a list of - # tuple group indices, and a list of the respective bounds of those - # indices. - for key_slice in self._slices_by_key.values(): - if isinstance(key_slice, tuple): - indices = key_slice - else: - indices = tuple(range(*key_slice.indices(self._stop))) - - groupby_indices.append(indices) - groupby_bounds.append((indices[0], indices[-1])) - - # Create new shared bounded coordinates. for coord, dim in self._shared_coords: climatological_coord = ( self.climatological and coord.units.is_time_reference() ) if coord.points.dtype.kind in "SU": if coord.bounds is None: - new_points = [] + new_points_list = [] new_bounds = None # np.apply_along_axis does not work with str.join, so we # need to loop through the array directly. First move axis @@ -2486,32 +2559,32 @@ def _compute_shared_coords(self): work_arr = np.moveaxis(coord.points, dim, -1) shape = work_arr.shape work_shape = (-1, shape[-1]) - new_shape = (len(self),) + new_shape: tuple[int, ...] = (len(self),) if coord.ndim > 1: new_shape += shape[:-1] work_arr = work_arr.reshape(work_shape) - for indices in groupby_indices: + for indices in self._groupby_indices: for arr in work_arr: - new_points.append("|".join(arr.take(indices))) + new_points_list.append("|".join(arr.take(indices))) # Reinstate flattened dimensions. Aggregated dim now leads. 
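# A small illustration of the itertools.groupby-based slicing introduced in
# _Groupby.group() above: each run of equal, adjacent points along a group-by
# coordinate becomes a slice, and those per-coordinate slices are later
# intersected to form the final group indices.
import itertools

def group_slices(points):
    start = 0
    for _, run in itertools.groupby(points):
        stop = start + sum(1 for _ in run)
        yield slice(start, stop)
        start = stop

print(list(group_slices([2001, 2001, 2001, 2002, 2002, 2003])))
# [slice(0, 3, None), slice(3, 5, None), slice(5, 6, None)]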
- new_points = np.array(new_points).reshape(new_shape) + new_points = np.array(new_points_list).reshape(new_shape) # Move aggregated dimension back to position it started in. new_points = np.moveaxis(new_points, 0, dim) else: msg = ( - "collapsing the bounded string coordinate {0!r}" - " is not supported".format(coord.name()) + "collapsing the bounded string coordinate" + f" {coord.name()!r} is not supported" ) raise ValueError(msg) else: - new_bounds = [] + new_bounds_list = [] if coord.has_bounds(): # Derive new coord's bounds from bounds. item = coord.bounds - maxmin_axis = (dim, -1) + maxmin_axis: Union[int, tuple[int, int]] = (dim, -1) first_choices = coord.bounds.take(0, -1) last_choices = coord.bounds.take(1, -1) @@ -2528,12 +2601,13 @@ def _compute_shared_coords(self): # Construct list of coordinate group boundary pairs. if monotonic: # Use first and last bound or point for new bounds. - for start, stop in groupby_bounds: + for indices in self._groupby_indices: + start, stop = indices[0], indices[-1] if ( getattr(coord, "circular", False) and (stop + 1) == self._stop ): - new_bounds.append( + new_bounds_list.append( [ first_choices.take(start, dim), first_choices.take(0, dim) @@ -2541,7 +2615,7 @@ def _compute_shared_coords(self): ] ) else: - new_bounds.append( + new_bounds_list.append( [ first_choices.take(start, dim), last_choices.take(stop, dim), @@ -2549,9 +2623,9 @@ def _compute_shared_coords(self): ) else: # Use min and max bound or point for new bounds. - for indices in groupby_indices: + for indices in self._groupby_indices: item_slice = item.take(indices, dim) - new_bounds.append( + new_bounds_list.append( [ item_slice.min(axis=maxmin_axis), item_slice.max(axis=maxmin_axis), @@ -2562,7 +2636,7 @@ def _compute_shared_coords(self): # dimension last, and the aggregated dimension back in its # original position. new_bounds = np.moveaxis( - np.array(new_bounds), (0, 1), (dim, -1) + np.array(new_bounds_list), (0, 1), (dim, -1) ) # Now create the new bounded group shared coordinate. @@ -2574,8 +2648,8 @@ def _compute_shared_coords(self): new_points = new_bounds.mean(-1) except TypeError: msg = ( - "The {0!r} coordinate on the collapsing dimension" - " cannot be collapsed.".format(coord.name()) + f"The {coord.name()!r} coordinate on the collapsing" + " dimension cannot be collapsed." ) raise ValueError(msg) @@ -2593,29 +2667,16 @@ def _compute_shared_coords(self): self.coords.append(new_coord) - def __len__(self): + def __len__(self) -> int: """Calculate the number of groups given the group-by coordinates.""" + return len(self.group()) - if self._slices_by_key: - value = len(self._slices_by_key) - else: - value = len([s for s in self.group()]) - - return value - - def __repr__(self): + def __repr__(self) -> str: groupby_coords = [coord.name() for coord in self._groupby_coords] - - if self._shared_coords_by_name: - shared_coords = [coord.name() for coord in self._shared_coords] - shared_string = ", shared_coords=%r)" % shared_coords - else: - shared_string = ")" - - return "%s(%r%s" % ( - self.__class__.__name__, - groupby_coords, - shared_string, + shared_coords = [coord.name() for coord, _ in self._shared_coords] + return ( + f"{self.__class__.__name__}({groupby_coords!r}" + f", shared_coords={shared_coords!r})" ) @@ -2807,7 +2868,7 @@ def __init__(self, mdtol=1): Both sourge and target cubes must have an XY grid defined by separate X and Y dimensions with dimension coordinates. All of the XY dimension coordinates must also be bounded, and have - the same cooordinate system. 
+ the same coordinate system. """ if not (0 <= mdtol <= 1): diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index 3b728e9a43..edead3948a 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -433,7 +433,7 @@ def _spherical_area(y_bounds, x_bounds, radius=1.0): Args: * y_bounds: - An (n, 2) shaped NumPy array of latitide bounds in radians. + An (n, 2) shaped NumPy array of latitude bounds in radians. * x_bounds: An (m, 2) shaped NumPy array of longitude bounds in radians. * radius: @@ -586,7 +586,7 @@ def _regrid_area_weighted_array( y_dim = src_data.ndim - 2 # Create empty "pre-averaging" data array that will enable the - # src_data data coresponding to a given target grid point, + # src_data data corresponding to a given target grid point, # to be stacked per point. # Note that dtype is not preserved and that the array mask # allows for regions that do not overlap. diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index f5e89a9e51..34dcae3026 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -213,7 +213,7 @@ def __init__(self, src_cube, coords, method, extrapolation_mode): # Trigger any deferred loading of the source cube's data and snapshot # its state to ensure that the interpolator is impervious to external # changes to the original source cube. The data is loaded to prevent - # the snaphot having lazy data, avoiding the potential for the + # the snapshot having lazy data, avoiding the potential for the # same data to be loaded again and again. if src_cube.has_lazy_data(): src_cube.data diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index f1891a48e4..4592a0ede7 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -239,7 +239,7 @@ def _regrid_indices(cells, depth, points): x_indices = _regrid_indices(tx_cells, tx_depth, sx_points) y_indices = _regrid_indices(ty_cells, ty_depth, sy_points) - # Now construct a sparse M x N matix, where M is the flattened target + # Now construct a sparse M x N matrix, where M is the flattened target # space, and N is the flattened source space. The sparse matrix will then # be populated with those source cube points that contribute to a specific # target cube cell. @@ -1021,7 +1021,7 @@ def _create_cube( The dimensions of the X and Y coordinate within the source Cube. tgt_coords : tuple of :class:`iris.coords.Coord`\\ 's Either two 1D :class:`iris.coords.DimCoord`\\ 's, two 1D - :class:`iris.experimental.ugrid.DimCoord`\\ 's or two ND + :class:`iris.experimental.ugrid.DimCoord`\\ 's or two n-D :class:`iris.coords.AuxCoord`\\ 's representing the new grid's X and Y coordinates. num_tgt_dims : int diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index a8e90a63ad..0d17f0b38a 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -15,6 +15,7 @@ import cartopy.crs as ccrs import cartopy.img_transform import cf_units +import dask.array as da import numpy as np import numpy.ma as ma @@ -1012,7 +1013,7 @@ def _transform_distance_vectors_tolerance_mask( """ Return a mask that can be applied to data array to mask elements where the magnitude of vectors are not preserved due to numerical - errors introduced by the tranformation between coordinate systems. + errors introduced by the transformation between coordinate systems. 
Args: * src_crs (`cartopy.crs.Projection`): @@ -1206,9 +1207,15 @@ def rotate_winds(u_cube, v_cube, target_cs): x = x.transpose() y = y.transpose() - # Create resulting cubes. - ut_cube = u_cube.copy() - vt_cube = v_cube.copy() + # Create resulting cubes - produce lazy output data if at least + # one input cube has lazy data + lazy_output = u_cube.has_lazy_data() or v_cube.has_lazy_data() + if lazy_output: + ut_cube = u_cube.copy(data=da.empty_like(u_cube.lazy_data())) + vt_cube = v_cube.copy(data=da.empty_like(v_cube.lazy_data())) + else: + ut_cube = u_cube.copy() + vt_cube = v_cube.copy() ut_cube.rename("transformed_{}".format(u_cube.name())) vt_cube.rename("transformed_{}".format(v_cube.name())) @@ -1236,8 +1243,12 @@ def rotate_winds(u_cube, v_cube, target_cs): apply_mask = mask.any() if apply_mask: # Make masked arrays to accept masking. - ut_cube.data = ma.asanyarray(ut_cube.data) - vt_cube.data = ma.asanyarray(vt_cube.data) + if lazy_output: + ut_cube = ut_cube.copy(data=da.ma.empty_like(ut_cube.core_data())) + vt_cube = vt_cube.copy(data=da.ma.empty_like(vt_cube.core_data())) + else: + ut_cube.data = ma.asanyarray(ut_cube.data) + vt_cube.data = ma.asanyarray(vt_cube.data) # Project vectors with u, v components one horiz slice at a time and # insert into the resulting cubes. @@ -1250,16 +1261,20 @@ def rotate_winds(u_cube, v_cube, target_cs): for dim in dims: index[dim] = slice(None, None) index = tuple(index) - u = u_cube.data[index] - v = v_cube.data[index] + u = u_cube.core_data()[index] + v = v_cube.core_data()[index] ut, vt = _transform_distance_vectors(u, v, ds, dx2, dy2) if apply_mask: - ut = ma.asanyarray(ut) - ut[mask] = ma.masked - vt = ma.asanyarray(vt) - vt[mask] = ma.masked - ut_cube.data[index] = ut - vt_cube.data[index] = vt + if lazy_output: + ut = da.ma.masked_array(ut, mask=mask) + vt = da.ma.masked_array(vt, mask=mask) + else: + ut = ma.asanyarray(ut) + ut[mask] = ma.masked + vt = ma.asanyarray(vt) + vt[mask] = ma.masked + ut_cube.core_data()[index] = ut + vt_cube.core_data()[index] = vt # Calculate new coords of locations in target coordinate system. xyz_tran = target_crs.transform_points(src_crs, x, y) diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 09a02ad51c..b77c6cd80f 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -225,33 +225,35 @@ def _assert_is_cube(cube): @_lenient_client(services=SERVICES) def add(cube, other, dim=None, in_place=False): """ - Calculate the sum of two cubes, or the sum of a cube and a - coordinate or scalar value. + Calculate the sum of two cubes, or the sum of a cube and a coordinate or + array or scalar value. - When summing two cubes, they must both have the same coordinate - systems & data resolution. + When summing two cubes, they must both have the same coordinate systems and + data resolution. - When adding a coordinate to a cube, they must both share the same - number of elements along a shared axis. + When adding a coordinate to a cube, they must both share the same number of + elements along a shared axis. - Args: + Parameters + ---------- - * cube: - An instance of :class:`iris.cube.Cube`. - * other: - An instance of :class:`iris.cube.Cube` or :class:`iris.coords.Coord`, - or a number or :class:`numpy.ndarray`. + cube : iris.cube.Cube + First operand to add. - Kwargs: + other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + Second operand to add. 
- * dim: - If supplying a coord with no match on the cube, you must supply - the dimension to process. - * in_place: - Whether to create a new Cube, or alter the given "cube". + dim : int, optional + If `other` is a coord which does not exist on the cube, specify the + dimension to which it should be mapped. - Returns: - An instance of :class:`iris.cube.Cube`. + in_place : bool, default=False + If `True`, alters the input cube. Otherwise a new cube is created. + + Returns + ------- + + iris.cube.Cube Notes ------ @@ -280,32 +282,34 @@ def add(cube, other, dim=None, in_place=False): def subtract(cube, other, dim=None, in_place=False): """ Calculate the difference between two cubes, or the difference between - a cube and a coordinate or scalar value. + a cube and a coordinate or array or scalar value. - When subtracting two cubes, they must both have the same coordinate - systems & data resolution. + When differencing two cubes, they must both have the same coordinate systems + and data resolution. - When subtracting a coordinate to a cube, they must both share the - same number of elements along a shared axis. + When subtracting a coordinate from a cube, they must both share the same + number of elements along a shared axis. - Args: + Parameters + ---------- - * cube: - An instance of :class:`iris.cube.Cube`. - * other: - An instance of :class:`iris.cube.Cube` or :class:`iris.coords.Coord`, - or a number or :class:`numpy.ndarray`. + cube : iris.cube.Cube + Cube from which to subtract. - Kwargs: + other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + Object to subtract from the cube. - * dim: - If supplying a coord with no match on the cube, you must supply - the dimension to process. - * in_place: - Whether to create a new Cube, or alter the given "cube". + dim : int, optional + If `other` is a coord which does not exist on the cube, specify the + dimension to which it should be mapped. - Returns: - An instance of :class:`iris.cube.Cube`. + in_place : bool, default=False + If `True`, alters the input cube. Otherwise a new cube is created. + + Returns + ------- + + iris.cube.Cube Notes ------ @@ -348,8 +352,8 @@ def _add_subtract_common( operation_name - the public name of the operation (e.g. 'divide') cube - the cube whose data is used as the first argument to `operation_function` - other - the cube, coord, ndarray or number whose data is - used as the second argument + other - the cube, coord, ndarray, dask array or number whose + data is used as the second argument new_dtype - the expected dtype of the output. Used in the case of scalar masked arrays dim - dimension along which to apply `other` if it's a @@ -384,24 +388,35 @@ def _add_subtract_common( @_lenient_client(services=SERVICES) def multiply(cube, other, dim=None, in_place=False): """ - Calculate the product of a cube and another cube or coordinate. + Calculate the product of two cubes, or the product of a cube and a coordinate + or array or scalar value. - Args: + When multiplying two cubes, they must both have the same coordinate systems + and data resolution. - * cube: - An instance of :class:`iris.cube.Cube`. - * other: - An instance of :class:`iris.cube.Cube` or :class:`iris.coords.Coord`, - or a number or :class:`numpy.ndarray`. + When mulplying a cube by a coordinate, they must both share the same number + of elements along a shared axis. - Kwargs: + Parameters + ---------- - * dim: - If supplying a coord with no match on the cube, you must supply - the dimension to process. 
+ cube : iris.cube.Cube + First operand to multiply. - Returns: - An instance of :class:`iris.cube.Cube`. + other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + Second operand to multiply. + + dim : int, optional + If `other` is a coord which does not exist on the cube, specify the + dimension to which it should be mapped. + + in_place : bool, default=False + If `True`, alters the input cube. Otherwise a new cube is created. + + Returns + ------- + + iris.cube.Cube Notes ------ @@ -461,24 +476,35 @@ def _inplace_common_checks(cube, other, math_op): @_lenient_client(services=SERVICES) def divide(cube, other, dim=None, in_place=False): """ - Calculate the division of a cube by a cube or coordinate. + Calculate the ratio of two cubes, or the ratio of a cube and a coordinate + or array or scalar value. - Args: + When dividing a cube by another cube, they must both have the same coordinate + systems and data resolution. - * cube: - An instance of :class:`iris.cube.Cube`. - * other: - An instance of :class:`iris.cube.Cube` or :class:`iris.coords.Coord`, - or a number or :class:`numpy.ndarray`. + When dividing a cube by a coordinate, they must both share the same number + of elements along a shared axis. - Kwargs: + Parameters + ---------- - * dim: - If supplying a coord with no match on the cube, you must supply - the dimension to process. + cube : iris.cube.Cube + Numerator. - Returns: - An instance of :class:`iris.cube.Cube`. + other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + Denominator. + + dim : int, optional + If `other` is a coord which does not exist on the cube, specify the + dimension to which it should be mapped. + + in_place : bool, default=False + If `True`, alters the input cube. Otherwise a new cube is created. + + Returns + ------- + + iris.cube.Cube Notes ------ @@ -842,8 +868,8 @@ def _binary_op_common( operation_name - the public name of the operation (e.g. 'divide') cube - the cube whose data is used as the first argument to `operation_function` - other - the cube, coord, ndarray or number whose data is - used as the second argument + other - the cube, coord, ndarray, dask array or number whose + data is used as the second argument new_dtype - the expected dtype of the output. Used in the case of scalar masked arrays new_unit - unit for the resulting quantity @@ -883,7 +909,10 @@ def _binary_op_common( rhs = other.core_data() else: # The rhs must be an array. - rhs = np.asanyarray(other) + if iris._lazy_data.is_lazy_data(other): + rhs = other + else: + rhs = np.asanyarray(other) def unary_func(lhs): data = operation_function(lhs, rhs) @@ -1194,7 +1223,7 @@ def __call__( Kwargs: * other - A cube, coord, ndarray or number whose data is used as the + A cube, coord, ndarray, dask array or number whose data is used as the second argument to the data function. 
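# Hypothetical usage sketch of the broadened operand support documented above:
# the second operand of the iris.analysis.maths functions may now be a dask
# array, and the arithmetic should stay lazy (no compute is triggered here).
import dask.array as da
import numpy as np
import iris.analysis.maths as maths
from iris.cube import Cube

cube = Cube(
    da.from_array(np.arange(6.0).reshape(2, 3), chunks=(1, 3)),
    standard_name="air_temperature",
    units="K",
)
offset = da.full((2, 3), 1.5)   # lazy second operand

result = maths.add(cube, offset)
print(result.has_lazy_data())   # expected: True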
* new_name: diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 24f7a9dede..84ce89ab6f 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -347,7 +347,7 @@ def interpolate(cube, sample_points, method=None): for columns_coord in columns.dim_coords + columns.aux_coords: src_dims = cube.coord_dims(columns_coord) if not squish_my_dims.isdisjoint(src_dims): - # Mapping the cube indicies onto the coord + # Mapping the cube indices onto the coord initial_coord_inds = [initial_inds[ind] for ind in src_dims] # Making the final ones the same way as for the cube # 0 will always appear in the initial ones because we know this @@ -660,7 +660,7 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): for c, (coord, coord_dims) in enumerate( sample_space_coords_and_dims ): - # Index of this datum along this coordinate (could be nD). + # Index of this datum along this coordinate (could be n-D). if coord_dims: keys = tuple(ndi[ind] for ind in coord_dims) else: diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index cb3149fe58..7def79f51e 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -52,7 +52,7 @@ def hexdigest(item): """ - Calculate a hexidecimal string hash representation of the provided item. + Calculate a hexadecimal string hash representation of the provided item. Calculates a 64-bit non-cryptographic hash of the provided item, using the extremely fast ``xxhash`` hashing algorithm, and returns the hexdigest @@ -67,7 +67,7 @@ def hexdigest(item): The item that requires to have its hexdigest calculated. Returns: - The string hexidecimal representation of the item's 64-bit hash. + The string hexadecimal representation of the item's 64-bit hash. """ # Special case: deal with numpy arrays. diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index a0c97dfc00..8d5d57d4a4 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -144,7 +144,7 @@ class Resolve: forecast_reference_time 1859-09-01 06:00:00 height 1.5 m Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' Model scenario 'A1B' @@ -162,7 +162,7 @@ class Resolve: height 1.5 m time 1860-06-01 00:00:00, bound=(1859-12-01 00:00:00, 1860-12-01 00:00:00) Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' Model scenario 'E1' @@ -185,7 +185,7 @@ class Resolve: forecast_reference_time 1859-09-01 06:00:00 height 1.5 m Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' STASH m01s03i236 @@ -726,7 +726,7 @@ def _create_prepared_item( If container or type(coord) is DimCoord/AuxCoord (i.e. not MeshCoord), then points+bounds define the built AuxCoord/DimCoord. - Theses points+bounds come either from those args, or the 'coord'. + These points+bounds come either from those args, or the 'coord'. Alternatively, when container or type(coord) is MeshCoord, then points==bounds==None and the preparted item contains mesh/location/axis properties for the resulting MeshCoord. @@ -1014,7 +1014,7 @@ def _assign_mapping(extent, unmapped_local_items, free_items=None): # Map to the first available unmapped local dimension or # the first available free dimension. 
# Dimension shape doesn't matter here as the extent is 1, - # therefore broadcasting will take care of any discrepency + # therefore broadcasting will take care of any discrepancy # between src and tgt dimension extent. if unmapped_local_items: result, _ = unmapped_local_items.pop(0) @@ -2542,7 +2542,7 @@ def mapped(self): forecast_reference_time 1859-09-01 06:00:00 height 1.5 m Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' Model scenario 'A1B' @@ -2559,7 +2559,7 @@ def mapped(self): height 1.5 m time 1860-06-01 00:00:00, bound=(1859-12-01 00:00:00, 1860-12-01 00:00:00) Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' Model scenario 'E1' @@ -2610,7 +2610,7 @@ def shape(self): forecast_reference_time 1859-09-01 06:00:00 height 1.5 m Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' Model scenario 'A1B' @@ -2627,7 +2627,7 @@ def shape(self): height 1.5 m time 1860-06-01 00:00:00, bound=(1859-12-01 00:00:00, 1860-12-01 00:00:00) Cell methods: - mean time (6 hour) + 0 time: mean (interval: 6 hour) Attributes: Conventions 'CF-1.5' Model scenario 'E1' diff --git a/lib/iris/config.py b/lib/iris/config.py index 3659ac7dcd..79d141e53f 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -171,8 +171,7 @@ def get_dir_option(section, option, default=None): ) # Override the data repository if the appropriate environment variable -# has been set. This is used in setup.py in the TestRunner command to -# enable us to simulate the absence of external data. +# has been set. override = os.environ.get("OVERRIDE_TEST_DATA_REPOSITORY") if override: TEST_DATA_DIR = None diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 91bb786ae8..63bc524637 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -10,10 +10,10 @@ from abc import ABCMeta, abstractmethod from collections import namedtuple -from collections.abc import Container, Iterator +from collections.abc import Container import copy from functools import lru_cache -from itertools import chain, zip_longest +from itertools import zip_longest import operator import warnings import zlib @@ -1218,10 +1218,6 @@ def __new__( BOUND_POSITION_END = 1 -# Private named tuple class for coordinate groups. -_GroupbyItem = namedtuple("GroupbyItem", "groupby_point, groupby_slice") - - def _get_2d_coord_bound_grid(bounds): """ Creates a grid using the bounds of a 2D coordinate with 4 sided cells. @@ -1936,11 +1932,12 @@ def _discontiguity_in_bounds(self, rtol=1e-5, atol=1e-8): * contiguous: (boolean) True if there are no discontiguities. * diffs: (array or tuple of arrays) - The diffs along the bounds of the coordinate. If self is a 2D - coord of shape (Y, X), a tuple of arrays is returned, where the - first is an array of differences along the x-axis, of the shape - (Y, X-1) and the second is an array of differences along the - y-axis, of the shape (Y-1, X). + A boolean array or tuple of boolean arrays which are true where + there are discontiguities between neighbouring bounds. If self is + a 2D coord of shape (Y, X), a pair of arrays is returned, where + the first is an array of differences along the x-axis, of the + shape (Y, X-1) and the second is an array of differences along + the y-axis, of the shape (Y-1, X). 
""" self._sanity_check_bounds() @@ -1949,7 +1946,9 @@ def _discontiguity_in_bounds(self, rtol=1e-5, atol=1e-8): contiguous = np.allclose( self.bounds[1:, 0], self.bounds[:-1, 1], rtol=rtol, atol=atol ) - diffs = np.abs(self.bounds[:-1, 1] - self.bounds[1:, 0]) + diffs = ~np.isclose( + self.bounds[1:, 0], self.bounds[:-1, 1], rtol=rtol, atol=atol + ) elif self.ndim == 2: @@ -1957,31 +1956,55 @@ def mod360_adjust(compare_axis): bounds = self.bounds.copy() if compare_axis == "x": - upper_bounds = bounds[:, :-1, 1] - lower_bounds = bounds[:, 1:, 0] + # Extract the pairs of upper bounds and lower bounds which + # connect along the "x" axis. These connect along indices + # as shown by the following diagram: + # + # 3---2 + 3---2 + # | | | | + # 0---1 + 0---1 + upper_bounds = np.stack( + (bounds[:, :-1, 1], bounds[:, :-1, 2]) + ) + lower_bounds = np.stack( + (bounds[:, 1:, 0], bounds[:, 1:, 3]) + ) elif compare_axis == "y": - upper_bounds = bounds[:-1, :, 3] - lower_bounds = bounds[1:, :, 0] + # Extract the pairs of upper bounds and lower bounds which + # connect along the "y" axis. These connect along indices + # as shown by the following diagram: + # + # 3---2 + # | | + # 0---1 + # + + + # 3---2 + # | | + # 0---1 + upper_bounds = np.stack( + (bounds[:-1, :, 3], bounds[:-1, :, 2]) + ) + lower_bounds = np.stack( + (bounds[1:, :, 0], bounds[1:, :, 1]) + ) if self.name() in ["longitude", "grid_longitude"]: # If longitude, adjust for longitude wrapping diffs = upper_bounds - lower_bounds - index = diffs > 180 + index = np.abs(diffs) > 180 if index.any(): sign = np.sign(diffs) modification = (index.astype(int) * 360) * sign upper_bounds -= modification - diffs_between_cells = np.abs(upper_bounds - lower_bounds) - cell_size = lower_bounds - upper_bounds - diffs_along_axis = diffs_between_cells > ( - atol + rtol * cell_size + diffs_along_bounds = ~np.isclose( + upper_bounds, lower_bounds, rtol=rtol, atol=atol ) - - points_close_enough = diffs_along_axis <= ( - atol + rtol * cell_size + diffs_along_axis = np.logical_or( + diffs_along_bounds[0], diffs_along_bounds[1] ) - contiguous_along_axis = np.all(points_close_enough) + + contiguous_along_axis = ~np.any(diffs_along_axis) return diffs_along_axis, contiguous_along_axis diffs_along_x, match_cell_x1 = mod360_adjust(compare_axis="x") @@ -3078,23 +3101,23 @@ def __init__(self, method, coords=None, intervals=None, comments=None): def __str__(self): """Return a custom string representation of CellMethod""" # Group related coord names intervals and comments together - cell_components = zip_longest( - self.coord_names, self.intervals, self.comments, fillvalue="" + coord_string = " ".join([f"{coord}:" for coord in self.coord_names]) + method_string = str(self.method) + interval_string = " ".join( + [f"interval: {interval}" for interval in self.intervals] ) + comment_string = " ".join([comment for comment in self.comments]) - collection_summaries = [] - cm_summary = "%s: " % self.method - - for coord_name, interval, comment in cell_components: - other_info = ", ".join(filter(None, chain((interval, comment)))) - if other_info: - coord_summary = "%s (%s)" % (coord_name, other_info) - else: - coord_summary = "%s" % coord_name + if interval_string and comment_string: + comment_string = "".join( + [f" comment: {comment}" for comment in self.comments] + ) + cm_summary = f"{coord_string} {method_string}" - collection_summaries.append(coord_summary) + if interval_string or comment_string: + cm_summary += f" ({interval_string}{comment_string})" - return cm_summary + ", 
".join(collection_summaries) + return cm_summary def __add__(self, other): # Disable the default tuple behaviour of tuple concatenation @@ -3131,26 +3154,3 @@ def xml_element(self, doc): cellMethod_xml_element.appendChild(coord_xml_element) return cellMethod_xml_element - - -# See ExplicitCoord._group() for the description/context. -class _GroupIterator(Iterator): - def __init__(self, points): - self._points = points - self._start = 0 - - def __next__(self): - num_points = len(self._points) - if self._start >= num_points: - raise StopIteration - - stop = self._start + 1 - m = self._points[self._start] - while stop < num_points and self._points[stop] == m: - stop += 1 - - group = _GroupbyItem(m, slice(self._start, stop)) - self._start = stop - return group - - next = __next__ diff --git a/lib/iris/cube.py b/lib/iris/cube.py index abe37c35fb..4c52303b2f 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -28,6 +28,7 @@ import iris._lazy_data as _lazy import iris._merge import iris.analysis +from iris.analysis import _Weights from iris.analysis.cartography import wrap_lons import iris.analysis.maths import iris.aux_factory @@ -541,6 +542,7 @@ def concatenate_cube( check_aux_coords=True, check_cell_measures=True, check_ancils=True, + check_derived_coords=True, ): """ Return the concatenated contents of the :class:`CubeList` as a single @@ -553,20 +555,30 @@ def concatenate_cube( Kwargs: * check_aux_coords - Checks the auxiliary coordinates of the cubes match. This check - is not applied to auxiliary coordinates that span the dimension - the concatenation is occurring along. Defaults to True. + Checks if the points and bounds of auxiliary coordinates of the + cubes match. This check is not applied to auxiliary coordinates + that span the dimension the concatenation is occurring along. + Defaults to True. * check_cell_measures - Checks the cell measures of the cubes match. This check - is not applied to cell measures that span the dimension - the concatenation is occurring along. Defaults to True. + Checks if the data of cell measures of the cubes match. This check + is not applied to cell measures that span the dimension the + concatenation is occurring along. Defaults to True. * check_ancils - Checks the ancillary variables of the cubes match. This check - is not applied to ancillary variables that span the dimension + Checks if the data of ancillary variables of the cubes match. This + check is not applied to ancillary variables that span the dimension the concatenation is occurring along. Defaults to True. + * check_derived_coords + Checks if the points and bounds of derived coordinates of the cubes + match. This check is not applied to derived coordinates that span + the dimension the concatenation is occurring along. Note that + differences in scalar coordinates and dimensional coordinates used + to derive the coordinate are still checked. Checks for auxiliary + coordinates used to derive the coordinates can be ignored with + `check_aux_coords`. Defaults to True. + .. note:: Concatenation cannot occur along an anonymous dimension. @@ -586,6 +598,7 @@ def concatenate_cube( check_aux_coords=check_aux_coords, check_cell_measures=check_cell_measures, check_ancils=check_ancils, + check_derived_coords=check_derived_coords, ) n_res_cubes = len(res) if n_res_cubes == 1: @@ -612,6 +625,7 @@ def concatenate( check_aux_coords=True, check_cell_measures=True, check_ancils=True, + check_derived_coords=True, ): """ Concatenate the cubes over their common dimensions. 
@@ -619,20 +633,30 @@ def concatenate( Kwargs: * check_aux_coords - Checks the auxiliary coordinates of the cubes match. This check - is not applied to auxiliary coordinates that span the dimension - the concatenation is occurring along. Defaults to True. + Checks if the points and bounds of auxiliary coordinates of the + cubes match. This check is not applied to auxiliary coordinates + that span the dimension the concatenation is occurring along. + Defaults to True. * check_cell_measures - Checks the cell measures of the cubes match. This check - is not applied to cell measures that span the dimension - the concatenation is occurring along. Defaults to True. + Checks if the data of cell measures of the cubes match. This check + is not applied to cell measures that span the dimension the + concatenation is occurring along. Defaults to True. * check_ancils - Checks the ancillary variables of the cubes match. This check - is not applied to ancillary variables that span the dimension + Checks if the data of ancillary variables of the cubes match. This + check is not applied to ancillary variables that span the dimension the concatenation is occurring along. Defaults to True. + * check_derived_coords + Checks if the points and bounds of derived coordinates of the cubes + match. This check is not applied to derived coordinates that span + the dimension the concatenation is occurring along. Note that + differences in scalar coordinates and dimensional coordinates used + to derive the coordinate are still checked. Checks for auxiliary + coordinates used to derive the coordinates can be ignored with + `check_aux_coords`. Defaults to True. + Returns: A new :class:`iris.cube.CubeList` of concatenated :class:`iris.cube.Cube` instances. @@ -717,6 +741,7 @@ def concatenate( check_aux_coords=check_aux_coords, check_cell_measures=check_cell_measures, check_ancils=check_ancils, + check_derived_coords=check_derived_coords, ) def realise_data(self): @@ -787,8 +812,8 @@ class Cube(CFVariableMixin): time \ 1998-12-01 00:00:00, bound=(1994-12-01 00:00:00, 1998-12-01 00:00:00) Cell methods: - mean within years time - mean over years time + 0 time: mean within years + 1 time: mean over years Attributes: STASH m01s16i203 source 'Data from Met Office Unified Model' @@ -3721,9 +3746,15 @@ def collapsed(self, coords, aggregator, **kwargs): sum :data:`~iris.analysis.SUM`. Weighted aggregations support an optional *weights* keyword argument. - If set, this should be supplied as an array of weights whose shape - matches the cube. Values for latitude-longitude area weights may be - calculated using :func:`iris.analysis.cartography.area_weights`. + If set, this can be supplied as an array, cube, or (names of) + :meth:`~iris.cube.Cube.coords`, :meth:`~iris.cube.Cube.cell_measures`, + or :meth:`~iris.cube.Cube.ancillary_variables`. In all cases, the + weights should be 1d (for collapsing over a 1d coordinate) or match the + shape of the cube. When weights are not given as arrays, units are + correctly handled for weighted sums, i.e., the original unit of the + cube is multiplied by the units of the weights. Values for + latitude-longitude area weights may be calculated using + :func:`iris.analysis.cartography.area_weights`. 
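A brief, hedged sketch of the extended weights handling documented above; 'cube' is assumed to be an already-loaded latitude-longitude cube and the cell-measure name "cell_area" is illustrative:

import iris.analysis
from iris.analysis.cartography import area_weights

# Existing behaviour: weights supplied as an array matching the cube shape.
weights = area_weights(cube)
mean = cube.collapsed(
    ["latitude", "longitude"], iris.analysis.MEAN, weights=weights
)

# New behaviour: weights named by an attached cell measure (or coordinate /
# ancillary variable). For a weighted SUM the result units become
# cube.units * weights.units.
total = cube.collapsed(
    ["latitude", "longitude"], iris.analysis.SUM, weights="cell_area"
)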
Some Iris aggregators support "lazy" evaluation, meaning that cubes resulting from this method may represent data arrays which are @@ -3768,8 +3799,8 @@ def collapsed(self, coords, aggregator, **kwargs): longitude \ 180.0 degrees, bound=(0.0, 360.0) degrees Cell methods: - mean month, year - mean longitude + 0 month: year: mean + 1 longitude: mean Attributes: Conventions 'CF-1.5' STASH m01s00i024 @@ -3802,6 +3833,10 @@ def collapsed(self, coords, aggregator, **kwargs): cube.collapsed(['latitude', 'longitude'], iris.analysis.VARIANCE) """ + # Update weights kwargs (if necessary) to handle different types of + # weights + _Weights.update_kwargs(kwargs, self) + # Convert any coordinate names to coordinates coords = self._as_list_of_coords(coords) @@ -3970,10 +4005,14 @@ def aggregated_by( also be supplied. These include :data:`~iris.analysis.MEAN` and :data:`~iris.analysis.SUM`. - Weighted aggregations support an optional *weights* keyword argument. If - set, this should be supplied as an array of weights whose shape matches - the cube or as 1D array whose length matches the dimension over which is - aggregated. + Weighted aggregations support an optional *weights* keyword argument. + If set, this can be supplied as an array, cube, or (names of) + :meth:`~iris.cube.Cube.coords`, :meth:`~iris.cube.Cube.cell_measures`, + or :meth:`~iris.cube.Cube.ancillary_variables`. In all cases, the + weights should be 1d or match the shape of the cube. When weights are + not given as arrays, units are correctly handled for weighted sums, + i.e., the original unit of the cube is multiplied by the units of the + weights. Parameters ---------- @@ -4025,13 +4064,17 @@ def aggregated_by( Scalar coordinates: forecast_period 0 hours Cell methods: - mean month, year - mean year + 0 month: year: mean + 1 year: mean Attributes: Conventions 'CF-1.5' STASH m01s00i024 """ + # Update weights kwargs (if necessary) to handle different types of + # weights + _Weights.update_kwargs(kwargs, self) + groupby_coords = [] dimension_to_groupby = None @@ -4070,10 +4113,16 @@ def aggregated_by( f"that is aggregated, got {len(weights):d}, expected " f"{self.shape[dimension_to_groupby]:d}" ) - weights = iris.util.broadcast_to_shape( - weights, - self.shape, - (dimension_to_groupby,), + + # iris.util.broadcast_to_shape does not preserve _Weights type + weights = _Weights( + iris.util.broadcast_to_shape( + weights, + self.shape, + (dimension_to_groupby,), + ), + self, + units=weights.units, ) if weights.shape != self.shape: raise ValueError( @@ -4129,98 +4178,65 @@ def aggregated_by( data_shape = list(self.shape + aggregator.aggregate_shape(**kwargs)) data_shape[dimension_to_groupby] = len(groupby) - # Aggregate the group-by data. + # Choose appropriate data and functions for data aggregation. if aggregator.lazy_func is not None and self.has_lazy_data(): - front_slice = (slice(None, None),) * dimension_to_groupby - back_slice = (slice(None, None),) * ( - len(data_shape) - dimension_to_groupby - 1 - ) + stack = da.stack + input_data = self.lazy_data() + agg_method = aggregator.lazy_aggregate + else: + input_data = self.data + # Note numpy.stack does not preserve masks. + stack = ma.stack if ma.isMaskedArray(input_data) else np.stack + agg_method = aggregator.aggregate + + # Create data and weights slices. 
+ front_slice = (slice(None),) * dimension_to_groupby + back_slice = (slice(None),) * ( + len(data_shape) - dimension_to_groupby - 1 + ) + + groupby_subarrs = map( + lambda groupby_slice: iris.util._slice_data_with_keys( + input_data, front_slice + (groupby_slice,) + back_slice + )[1], + groupby.group(), + ) - # Create cube and weights slices - groupby_subcubes = map( - lambda groupby_slice: self[ + if weights is not None: + groupby_subweights = map( + lambda groupby_slice: weights[ front_slice + (groupby_slice,) + back_slice - ].lazy_data(), + ], groupby.group(), ) - if weights is not None: - groupby_subweights = map( - lambda groupby_slice: weights[ - front_slice + (groupby_slice,) + back_slice - ], - groupby.group(), - ) - else: - groupby_subweights = (None for _ in range(len(groupby))) + else: + groupby_subweights = (None for _ in range(len(groupby))) - agg = iris.analysis.create_weighted_aggregator_fn( - aggregator.lazy_aggregate, axis=dimension_to_groupby, **kwargs + # Aggregate data slices. + agg = iris.analysis.create_weighted_aggregator_fn( + agg_method, axis=dimension_to_groupby, **kwargs + ) + result = list(map(agg, groupby_subarrs, groupby_subweights)) + + # If weights are returned, "result" is a list of tuples (each tuple + # contains two elements; the first is the aggregated data, the + # second is the aggregated weights). Convert these to two lists + # (one for the aggregated data and one for the aggregated weights) + # before combining the different slices. + if return_weights: + result, weights_result = list(zip(*result)) + aggregateby_weights = stack( + weights_result, axis=dimension_to_groupby ) - result = list(map(agg, groupby_subcubes, groupby_subweights)) - - # If weights are returned, "result" is a list of tuples (each tuple - # contains two elements; the first is the aggregated data, the - # second is the aggregated weights). Convert these to two lists - # (one for the aggregated data and one for the aggregated weights) - # before combining the different slices. - if return_weights: - result, weights_result = list(zip(*result)) - aggregateby_weights = da.stack( - weights_result, axis=dimension_to_groupby - ) - else: - aggregateby_weights = None - aggregateby_data = da.stack(result, axis=dimension_to_groupby) else: - cube_slice = [slice(None, None)] * len(data_shape) - for i, groupby_slice in enumerate(groupby.group()): - # Slice the cube with the group-by slice to create a group-by - # sub-cube. - cube_slice[dimension_to_groupby] = groupby_slice - groupby_sub_cube = self[tuple(cube_slice)] - - # Slice the weights - if weights is not None: - groupby_sub_weights = weights[tuple(cube_slice)] - kwargs["weights"] = groupby_sub_weights - - # Perform the aggregation over the group-by sub-cube and - # repatriate the aggregated data into the aggregate-by cube - # data. If weights are also returned, handle them separately. - result = aggregator.aggregate( - groupby_sub_cube.data, axis=dimension_to_groupby, **kwargs - ) - if return_weights: - weights_result = result[1] - result = result[0] - else: - weights_result = None - - # Determine aggregation result data type for the aggregate-by - # cube data on first pass. 
- if i == 0: - if ma.isMaskedArray(self.data): - aggregateby_data = ma.zeros( - data_shape, dtype=result.dtype - ) - else: - aggregateby_data = np.zeros( - data_shape, dtype=result.dtype - ) - if weights_result is not None: - aggregateby_weights = np.zeros( - data_shape, dtype=weights_result.dtype - ) - else: - aggregateby_weights = None - cube_slice[dimension_to_groupby] = i - aggregateby_data[tuple(cube_slice)] = result - if weights_result is not None: - aggregateby_weights[tuple(cube_slice)] = weights_result + aggregateby_weights = None - # Restore original weights. - if weights is not None: - kwargs["weights"] = weights + aggregateby_data = stack(result, axis=dimension_to_groupby) + # Ensure plain ndarray is output if plain ndarray was input. + if ma.isMaskedArray(aggregateby_data) and not ma.isMaskedArray( + input_data + ): + aggregateby_data = ma.getdata(aggregateby_data) # Add the aggregation meta data to the aggregate-by cube. aggregator.update_metadata( @@ -4289,8 +4305,11 @@ def rolling_window(self, coord, aggregator, window, **kwargs): * kwargs: Aggregator and aggregation function keyword arguments. The weights - argument to the aggregator, if any, should be a 1d array with the - same length as the chosen window. + argument to the aggregator, if any, should be a 1d array, cube, or + (names of) :meth:`~iris.cube.Cube.coords`, + :meth:`~iris.cube.Cube.cell_measures`, or + :meth:`~iris.cube.Cube.ancillary_variables` with the same length as + the chosen window. Returns: :class:`iris.cube.Cube`. @@ -4321,7 +4340,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): forecast_reference_time 2011-07-23 00:00:00 realization 10 Cell methods: - mean time (1 hour) + 0 time: mean (interval: 1 hour) Attributes: STASH m01s00i024 source \ @@ -4346,8 +4365,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): forecast_reference_time 2011-07-23 00:00:00 realization 10 Cell methods: - mean time (1 hour) - mean time + 0 time: mean (interval: 1 hour) + 1 time: mean Attributes: STASH m01s00i024 source \ @@ -4358,6 +4377,10 @@ def rolling_window(self, coord, aggregator, window, **kwargs): possible windows of size 3 from the original cube. """ + # Update weights kwargs (if necessary) to handle different types of + # weights + _Weights.update_kwargs(kwargs, self) + coord = self._as_list_of_coords(coord)[0] if getattr(coord, "circular", False): @@ -4459,8 +4482,14 @@ def rolling_window(self, coord, aggregator, window, **kwargs): "as the window." ) kwargs = dict(kwargs) - kwargs["weights"] = iris.util.broadcast_to_shape( - weights, rolling_window_data.shape, (dimension + 1,) + + # iris.util.broadcast_to_shape does not preserve _Weights type + kwargs["weights"] = _Weights( + iris.util.broadcast_to_shape( + weights, rolling_window_data.shape, (dimension + 1,) + ), + self, + units=weights.units, ) data_result = aggregator.aggregate( rolling_window_data, axis=dimension + 1, **kwargs diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 7c5d8e99cc..76c6002d2b 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -295,23 +295,23 @@ def __init__(self, src_cube, tgt_grid_cube, method, projection=None): if src_x_coord.coord_system != src_y_coord.coord_system: raise ValueError( "'src_cube' lateral geographic coordinates have " - "differing coordinate sytems." + "differing coordinate systems." ) if src_x_coord.coord_system is None: raise ValueError( "'src_cube' lateral geographic coordinates have " - "no coordinate sytem." 
+ "no coordinate system." ) tgt_x_coord, tgt_y_coord = get_xy_dim_coords(tgt_grid_cube) if tgt_x_coord.coord_system != tgt_y_coord.coord_system: raise ValueError( "'tgt_grid_cube' lateral geographic coordinates " - "have differing coordinate sytems." + "have differing coordinate systems." ) if tgt_x_coord.coord_system is None: raise ValueError( "'tgt_grid_cube' lateral geographic coordinates " - "have no coordinate sytem." + "have no coordinate system." ) if projection is None: @@ -572,12 +572,12 @@ def __call__(self, src_cube): if src_x_coord.coord_system != src_y_coord.coord_system: raise ValueError( "'src' lateral geographic coordinates have " - "differing coordinate sytems." + "differing coordinate systems." ) if src_cs is None: raise ValueError( "'src' lateral geographic coordinates have " - "no coordinate sytem." + "no coordinate system." ) # Check the source grid units. diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index a522d91313..cfa3935991 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -209,7 +209,8 @@ def load_meshes(uris, var_name=None): result = {} for source in valid_sources: - meshes_dict = _meshes_from_cf(CFUGridReader(source)) + with CFUGridReader(source) as cf_reader: + meshes_dict = _meshes_from_cf(cf_reader) meshes = list(meshes_dict.values()) if var_name is not None: meshes = list(filter(lambda m: m.var_name == var_name, meshes)) diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 0d566da73f..af557c345c 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -2855,7 +2855,7 @@ def __init__( # N.B. at present, coords in a Mesh are stored+accessed by 'axis', which # means they must have a standard_name. So ... - # (a) the 'location' (face/edge) coord *always* has a useable phenomenon + # (a) the 'location' (face/edge) coord *always* has a usable phenomenon # identity. # (b) we still want to check that location+node coords have the same # phenomenon (i.e. physical meaning identity + units), **but** ... diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index 4efab6490b..a13a43d3fd 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -220,7 +220,7 @@ def recombine_submeshes( # Use the mesh_dim to transpose inputs + outputs, if required, as it is # simpler for all the array operations to always have the mesh dim *last*. 
if mesh_dim == mesh_cube.ndim - 1: - # Mesh dim is already the last one : no tranpose required + # Mesh dim is already the last one : no transpose required untranspose_dims = None else: dim_range = np.arange(mesh_cube.ndim, dtype=int) diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index 96a848deb0..86b304b82c 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -9,6 +9,7 @@ """ from iris.io.format_picker import ( + DataSourceObjectProtocol, FileExtension, FormatAgent, FormatSpecification, @@ -125,16 +126,34 @@ def _load_grib(*args, **kwargs): ) -_nc_dap = FormatSpecification( - "NetCDF OPeNDAP", - UriProtocol(), - lambda protocol: protocol in ["http", "https"], - netcdf.load_cubes, - priority=6, - constraint_aware_handler=True, +FORMAT_AGENT.add_spec( + FormatSpecification( + "NetCDF OPeNDAP", + UriProtocol(), + lambda protocol: protocol in ["http", "https"], + netcdf.load_cubes, + priority=6, + constraint_aware_handler=True, + ) +) + +# NetCDF file presented as an open, readable netCDF4 dataset (or mimic). +FORMAT_AGENT.add_spec( + FormatSpecification( + "NetCDF dataset", + DataSourceObjectProtocol(), + lambda object: all( + hasattr(object, x) + for x in ("variables", "dimensions", "groups", "ncattrs") + ), + # Note: this uses the same call as the above "NetCDF_v4" (and "NetCDF OPeNDAP") + # The handler itself needs to detect what is passed + handle it appropriately. + netcdf.load_cubes, + priority=4, + constraint_aware_handler=True, + ) ) -FORMAT_AGENT.add_spec(_nc_dap) -del _nc_dap + # # UM Fieldsfiles. diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 35163c47d5..bbf9c660c5 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -13,6 +13,8 @@ build routines, and which it does not use. """ +import re +from typing import List import warnings import cf_units @@ -28,10 +30,6 @@ import iris.exceptions import iris.fileformats.cf as cf import iris.fileformats.netcdf -from iris.fileformats.netcdf import ( - UnknownCellMethodWarning, - parse_cell_methods, -) from iris.fileformats.netcdf.loader import _get_cf_var_data import iris.std_names import iris.util @@ -184,6 +182,210 @@ CF_VALUE_STD_NAME_PROJ_Y = "projection_y_coordinate" +################################################################################ +# Handling of cell-methods. + +_CM_COMMENT = "comment" +_CM_EXTRA = "extra" +_CM_INTERVAL = "interval" +_CM_METHOD = "method" +_CM_NAME = "name" +_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+") +_CM_PARSE = re.compile( + r""" + (?P([\w_]+\s*?:\s+)+) + (?P[\w_\s]+(?![\w_]*\s*?:))\s* + (?: + \(\s* + (?P.+) + \)\s* + )? + """, + re.VERBOSE, +) + +# Cell methods. +_CM_KNOWN_METHODS = [ + "point", + "sum", + "mean", + "maximum", + "minimum", + "mid_range", + "standard_deviation", + "variance", + "mode", + "median", +] + + +def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: + """ + Split a CF cell_methods attribute string into a list of zero or more cell + methods, each of which is then parsed with a regex to return a list of match + objects. + + Args: + + * nc_cell_methods: The value of the cell methods attribute to be split. + + Returns: + + * nc_cell_methods_matches: A list of the re.Match objects associated with + each parsed cell method + + Splitting is done based on words followed by colons outside of any brackets. 
+ Validation of anything other than being laid out in the expected format is + left to the calling function. + """ + + # Find name candidates + name_start_inds = [] + for m in _CM_PARSE_NAME.finditer(nc_cell_methods): + name_start_inds.append(m.start()) + + # Remove those that fall inside brackets + bracket_depth = 0 + for ind, cha in enumerate(nc_cell_methods): + if cha == "(": + bracket_depth += 1 + elif cha == ")": + bracket_depth -= 1 + if bracket_depth < 0: + msg = ( + "Cell methods may be incorrectly parsed due to mismatched " + "brackets" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + if bracket_depth > 0 and ind in name_start_inds: + name_start_inds.remove(ind) + + # List tuples of indices of starts and ends of the cell methods in the string + method_indices = [] + for ii in range(len(name_start_inds) - 1): + method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) + method_indices.append((name_start_inds[-1], len(nc_cell_methods))) + + # Index the string and match against each substring + nc_cell_methods_matches = [] + for start_ind, end_ind in method_indices: + nc_cell_method_str = nc_cell_methods[start_ind:end_ind] + nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) + if not nc_cell_method_match: + msg = ( + f"Failed to fully parse cell method string: {nc_cell_methods}" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + continue + nc_cell_methods_matches.append(nc_cell_method_match) + + return nc_cell_methods_matches + + +class UnknownCellMethodWarning(Warning): + pass + + +def parse_cell_methods(nc_cell_methods): + """ + Parse a CF cell_methods attribute string into a tuple of zero or + more CellMethod instances. + + Args: + + * nc_cell_methods (str): + The value of the cell methods attribute to be parsed. + + Returns: + + * cell_methods + An iterable of :class:`iris.coords.CellMethod`. + + Multiple coordinates, intervals and comments are supported. + If a method has a non-standard name a warning will be issued, but the + results are not affected. + + """ + + cell_methods = [] + if nc_cell_methods is not None: + for m in _split_cell_methods(nc_cell_methods): + d = m.groupdict() + method = d[_CM_METHOD] + method = method.strip() + # Check validity of method, allowing for multi-part methods + # e.g. mean over years. 
+ method_words = method.split() + if method_words[0].lower() not in _CM_KNOWN_METHODS: + msg = "NetCDF variable contains unknown cell method {!r}" + warnings.warn( + msg.format("{}".format(method_words[0])), + UnknownCellMethodWarning, + ) + d[_CM_METHOD] = method + name = d[_CM_NAME] + name = name.replace(" ", "") + name = name.rstrip(":") + d[_CM_NAME] = tuple([n for n in name.split(":")]) + interval = [] + comment = [] + if d[_CM_EXTRA] is not None: + # + # tokenise the key words and field colon marker + # + d[_CM_EXTRA] = d[_CM_EXTRA].replace( + "comment:", "<><<:>>" + ) + d[_CM_EXTRA] = d[_CM_EXTRA].replace( + "interval:", "<><<:>>" + ) + d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>") + if len(d[_CM_EXTRA]) == 1: + comment.extend(d[_CM_EXTRA]) + else: + next_field_type = comment + for field in d[_CM_EXTRA]: + field_type = next_field_type + index = field.rfind("<>") + if index == 0: + next_field_type = interval + continue + elif index > 0: + next_field_type = interval + else: + index = field.rfind("<>") + if index == 0: + next_field_type = comment + continue + elif index > 0: + next_field_type = comment + if index != -1: + field = field[:index] + field_type.append(field.strip()) + # + # cater for a shared interval over multiple axes + # + if len(interval): + if len(d[_CM_NAME]) != len(interval) and len(interval) == 1: + interval = interval * len(d[_CM_NAME]) + # + # cater for a shared comment over multiple axes + # + if len(comment): + if len(d[_CM_NAME]) != len(comment) and len(comment) == 1: + comment = comment * len(d[_CM_NAME]) + d[_CM_INTERVAL] = tuple(interval) + d[_CM_COMMENT] = tuple(comment) + cell_method = iris.coords.CellMethod( + d[_CM_METHOD], + coords=d[_CM_NAME], + intervals=d[_CM_INTERVAL], + comments=d[_CM_COMMENT], + ) + cell_methods.append(cell_method) + return tuple(cell_methods) + + ################################################################################ def build_cube_metadata(engine): """Add the standard meta data to the cube.""" @@ -347,7 +549,7 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN, None ) - # The following accounts for the inconsistancy in the transverse + # The following accounts for the inconsistency in the transverse # mercator description within the CF spec. if longitude_of_central_meridian is None: longitude_of_central_meridian = getattr( @@ -670,7 +872,7 @@ def get_attr_units(cf_var, attributes): ): attr_units = cf_units._NO_UNIT_STRING - # Get any assoicated calendar for a time reference coordinate. + # Get any associated calendar for a time reference coordinate. if cf_units.as_unit(attr_units).is_time_reference(): attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) @@ -727,7 +929,7 @@ def get_cf_bounds_var(cf_coord_var): attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) - # Determine bounds, prefering standard bounds over climatology. + # Determine bounds, preferring standard bounds over climatology. # NB. No need to raise a warning if the bounds/climatology # variable is missing, as that will already have been done by # iris.fileformats.cf. 
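As a small, illustrative round trip of the relocated parser together with the revised CellMethod string format (the attribute value below is made up):

from iris.fileformats.netcdf import parse_cell_methods

# Parse a CF cell_methods attribute into CellMethod objects.
cell_methods = parse_cell_methods("time: mean (interval: 6 hour) area: sum")

for cm in cell_methods:
    # With the updated CellMethod.__str__ these print in the
    # "name: method (interval: ...)" style now shown in cube summaries.
    print(cm)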
@@ -1270,7 +1472,7 @@ def _is_rotated(engine, cf_name, cf_attr_value): ################################################################################ def is_rotated_latitude(engine, cf_name): - """Determine whether the CF coodinate variable is rotated latitude.""" + """Determine whether the CF coordinate variable is rotated latitude.""" return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LAT) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index a3a23dc323..2ed01846bd 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -20,10 +20,10 @@ import re import warnings -import netCDF4 import numpy as np import numpy.ma as ma +from iris.fileformats.netcdf import _thread_safe_nc import iris.util # @@ -1043,15 +1043,25 @@ class CFReader: # TODO: remove once iris.experimental.ugrid.CFUGridReader is folded in. CFGroup = CFGroup - def __init__(self, filename, warn=False, monotonic=False): - self._dataset = None - self._filename = os.path.expanduser(filename) + def __init__(self, file_source, warn=False, monotonic=False): + # Ensure safe operation for destructor, should init fail. + self._own_file = False + if isinstance(file_source, str): + # Create from filepath : open it + own it (=close when we die). + self._filename = os.path.expanduser(file_source) + self._dataset = _thread_safe_nc.DatasetWrapper( + self._filename, mode="r" + ) + self._own_file = True + else: + # We have been passed an open dataset. + # We use it but don't own it (don't close it). + self._dataset = file_source + self._filename = self._dataset.filepath() #: Collection of CF-netCDF variables associated with this netCDF file self.cf_group = self.CFGroup() - self._dataset = netCDF4.Dataset(self._filename, mode="r") - # Issue load optimisation warning. if warn and self._dataset.file_format in [ "NETCDF3_CLASSIC", @@ -1068,6 +1078,19 @@ def __init__(self, filename, warn=False, monotonic=False): self._build_cf_groups() self._reset() + def __enter__(self): + # Enable use as a context manager + # N.B. this **guarantees* closure of the file, when the context is exited. + # Note: ideally, the class would not do so much work in the __init__ call, and + # would do all that here, after acquiring necessary permissions/locks. + # But for legacy reasons, we can't do that. So **effectively**, the context + # (in terms of access control) already started, when we created the object. + return self + + def __exit__(self, exc_type, exc_value, traceback): + # When used as a context-manager, **always** close the file on exit. + self._close() + @property def filename(self): """The file that the CFReader is reading.""" @@ -1294,10 +1317,15 @@ def _reset(self): for nc_var_name in self._dataset.variables.keys(): self.cf_group[nc_var_name].cf_attrs_reset() - def __del__(self): + def _close(self): # Explicitly close dataset to prevent file remaining open. - if self._dataset is not None: + if self._own_file and self._dataset is not None: self._dataset.close() + self._dataset = None + + def __del__(self): + # Be sure to close dataset when CFReader is destroyed / garbage-collected. 
+ self._close() def _getncattr(dataset, attr, default=None): diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index a0b799697d..9a779cc92d 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -8,7 +8,7 @@ def _get_NAME_loader(filename): """ - Return the approriate load function for a NAME file based + Return the appropriate load function for a NAME file based on the contents of its header. """ diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index b9b64a343e..0189a8806f 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -588,7 +588,7 @@ def _build_cell_methods(av_or_ints, coord): Args: * av_or_ints (iterable of strings): - An iterable of strings containing the colummn heading entries + An iterable of strings containing the column heading entries to be parsed. * coord (string or :class:`iris.coords.Coord`): The coordinate name (or :class:`iris.coords.Coord` instance) @@ -1079,7 +1079,7 @@ def load_NAMEIII_version2(filename): elif zunits == "Pa": z_name = "air_pressure" else: - ValueError("Vertical coordinate unkown") + ValueError("Vertical coordinate unknown") zindex = data.index(zgrid[0]) dim_coords.append("Z") diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index 505e173b0b..b696b200ff 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -18,6 +18,11 @@ # Note: *must* be done before importing from submodules, as they also use this ! logger = iris.config.get_logger(__name__) +# Note: these probably shouldn't be public, but for now they are. +from .._nc_load_rules.helpers import ( + UnknownCellMethodWarning, + parse_cell_methods, +) from .loader import DEBUG, NetCDFDataProxy, load_cubes from .saver import ( CF_CONVENTIONS_VERSION, @@ -25,8 +30,6 @@ SPATIO_TEMPORAL_AXES, CFNameCoordMap, Saver, - UnknownCellMethodWarning, - parse_cell_methods, save, ) diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py new file mode 100644 index 0000000000..15ac117a8b --- /dev/null +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -0,0 +1,140 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Module containing code to create locks enabling dask workers to co-operate. + +This matter is complicated by needing different solutions for different dask scheduler +types, i.e. local 'threads' scheduler, local 'processes' or distributed. + +In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset +targeting an output file, and creates a Saver.file_write_lock object to serialise +write-accesses to the file from dask tasks : All dask-task file writes go via a +"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link +to the Saver.file_write_lock, and uses it to prevent workers from fouling each other. + +For each chunk written, the NetCDFWriteProxy acquires the common per-file lock; +opens a Dataset on the file; performs a write to the relevant variable; closes the +Dataset and then releases the lock. This process is obviously very similar to what the +NetCDFDataProxy does for reading lazy chunks. + +For a threaded scheduler, the Saver.lock is a simple threading.Lock(). 
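Purely as an illustration of the behaviour this docstring describes (the snippet is not part of the module itself), the scheduler-dependent lock might be obtained and used like so:

from iris.fileformats.netcdf._dask_locks import get_worker_lock

# The returned lock depends on the active dask scheduler: a threading.Lock
# for the local threaded scheduler, or a distributed.Lock keyed on this
# identity string when a distributed Client is active.
lock = get_worker_lock("/tmp/output.nc")  # identity string is illustrative
lock.acquire()
try:
    ...  # write one data chunk to the target file
finally:
    lock.release()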
The workers +(threads) execute tasks which contain a NetCDFWriteProxy, as above. All of those +contain the common lock, and this is simply **the same object** for all workers, since +they share an address space. + +For a distributed scheduler, the Saver.lock is a `distributed.Lock()` which is +identified with the output filepath. This is distributed to the workers by +serialising the task function arguments, which will include the NetCDFWriteProxy. +A worker behaves like a process, though it may execute on a remote machine. When a +distributed.Lock is deserialised to reconstruct the worker task, this creates an object +that communicates with the scheduler. These objects behave as a single common lock, +as they all have the same string 'identity', so the scheduler implements inter-process +communication so that they can mutually exclude each other. + +It is also *conceivable* that multiple processes could write to the same file in +parallel, if the operating system supports it. However, this also requires that the +libnetcdf C library is built with parallel access option, which is not common. +With the "ordinary" libnetcdf build, a process which attempts to open for writing a file +which is _already_ open for writing simply raises an access error. +In any case, Iris netcdf saver will not support this mode of operation, at present. + +We don't currently support a local "processes" type scheduler. If we did, the +behaviour should be very similar to a distributed scheduler. It would need to use some +other serialisable shared-lock solution in place of 'distributed.Lock', which requires +a distributed scheduler to function. + +""" +import threading + +import dask.array +import dask.base +import dask.multiprocessing +import dask.threaded + + +# A dedicated error class, allowing filtering and testing of errors raised here. +class DaskSchedulerTypeError(ValueError): + pass + + +def dask_scheduler_is_distributed(): + """Return whether a distributed.Client is active.""" + # NOTE: this replicates logic in `dask.base.get_scheduler` : if a distributed client + # has been created + is still active, then the default scheduler will always be + # "distributed". + is_distributed = False + # NOTE: must still work when 'distributed' is not available. + try: + import distributed + + client = distributed.get_client() + is_distributed = client is not None + except (ImportError, ValueError): + pass + return is_distributed + + +def get_dask_array_scheduler_type(): + """ + Work out what type of scheduler an array.compute*() will use. + + Returns one of 'distributed', 'threads' or 'processes'. + The return value is a valid argument for dask.config.set(scheduler=). + This cannot distinguish between distributed local and remote clusters -- both of + those simply return 'distributed'. + + NOTE: this takes account of how dask is *currently* configured. It will be wrong + if the config changes before the compute actually occurs. + + """ + if dask_scheduler_is_distributed(): + result = "distributed" + else: + # Call 'get_scheduler', which respects the config settings, but pass an array + # so we default to the default scheduler for that type of object. + trial_dask_array = dask.array.zeros(1) + get_function = dask.base.get_scheduler(collections=[trial_dask_array]) + # Detect the ones which we recognise. 
+ if get_function == dask.threaded.get: + result = "threads" + elif get_function == dask.local.get_sync: + result = "single-threaded" + elif get_function == dask.multiprocessing.get: + result = "processes" + else: + msg = f"Dask default scheduler for arrays is unrecognised : {get_function}" + raise DaskSchedulerTypeError(msg) + + return result + + +def get_worker_lock(identity: str): + """ + Return a mutex Lock which can be shared by multiple Dask workers. + + The type of Lock generated depends on the dask scheduler type, which must therefore + be set up before this is called. + + """ + scheduler_type = get_dask_array_scheduler_type() + if scheduler_type in ("threads", "single-threaded"): + # N.B. the "identity" string is never used in this case, as the same actual + # lock object is used by all workers. + lock = threading.Lock() + elif scheduler_type == "distributed": + from dask.distributed import Lock as DistributedLock + + lock = DistributedLock(identity) + else: + msg = ( + "The configured dask array scheduler type is " + f'"{scheduler_type}", ' + "which is not supported by the Iris netcdf saver." + ) + raise DaskSchedulerTypeError(msg) + + # NOTE: not supporting 'processes' scheduler, for now. + return lock diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py new file mode 100644 index 0000000000..21c697acab --- /dev/null +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -0,0 +1,403 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Module to ensure all calls to the netCDF4 library are thread-safe. + +Intention is that no other Iris module should import the netCDF4 module. + +""" +from abc import ABC +from threading import Lock +import typing + +import netCDF4 +import numpy as np + +_GLOBAL_NETCDF4_LOCK = Lock() + +# Doesn't need thread protection, but this allows all netCDF4 refs to be +# replaced with thread_safe refs. +default_fillvals = netCDF4.default_fillvals + + +class _ThreadSafeWrapper(ABC): + """ + Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK. + + Designed to 'gate keep' all the instance's API calls, but allowing the + same API as if working directly with the instance itself. + + Using a contained object instead of inheritance, as we cannot successfully + subclass or monkeypatch netCDF4 classes, because they are only wrappers for + the C-layer. + """ + + # Note: this is only used to create a "contained" from passed args. + CONTAINED_CLASS = NotImplemented + # Note: this defines how we identify/check that a contained is of the expected type + # (in a duck-type way). + _DUCKTYPE_CHECK_PROPERTIES: typing.List[str] = [NotImplemented] + + # Allows easy type checking, avoiding difficulties with isinstance and mocking. + THREAD_SAFE_FLAG = True + + @classmethod + def is_contained_type(cls, instance): + return all( + hasattr(instance, attr) for attr in cls._DUCKTYPE_CHECK_PROPERTIES + ) + + @classmethod + def from_existing(cls, instance): + """Pass an existing instance to __init__, where it is contained.""" + assert cls.is_contained_type(instance) + return cls(instance) + + def __init__(self, *args, **kwargs): + """Contain an existing instance, or generate a new one from arguments.""" + if len(args) == 1 and self.is_contained_type(args[0]): + # Passed a contained-type object : Wrap ourself around that. 
+ instance = args[0] + # We should never find ourselves "wrapping a wrapper". + assert not hasattr(instance, "THREAD_SAFE_FLAG") + else: + # Create a contained object of the intended type from passed args. + with _GLOBAL_NETCDF4_LOCK: + instance = self.CONTAINED_CLASS(*args, **kwargs) + + self._contained_instance = instance + + def __getattr__(self, item): + if item == "_contained_instance": + # Special behaviour when accessing the _contained_instance itself. + return object.__getattribute__(self, item) + else: + with _GLOBAL_NETCDF4_LOCK: + return getattr(self._contained_instance, item) + + def __setattr__(self, key, value): + if key == "_contained_instance": + # Special behaviour when accessing the _contained_instance itself. + object.__setattr__(self, key, value) + else: + with _GLOBAL_NETCDF4_LOCK: + return setattr(self._contained_instance, key, value) + + def __getitem__(self, item): + with _GLOBAL_NETCDF4_LOCK: + return self._contained_instance.__getitem__(item) + + def __setitem__(self, key, value): + with _GLOBAL_NETCDF4_LOCK: + return self._contained_instance.__setitem__(key, value) + + +class DimensionWrapper(_ThreadSafeWrapper): + """ + Accessor for a netCDF4.Dimension, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Dimension. + """ + + CONTAINED_CLASS = netCDF4.Dimension + _DUCKTYPE_CHECK_PROPERTIES = ["isunlimited"] + + +class VariableWrapper(_ThreadSafeWrapper): + """ + Accessor for a netCDF4.Variable, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Variable. + """ + + CONTAINED_CLASS = netCDF4.Variable + _DUCKTYPE_CHECK_PROPERTIES = ["dimensions", "dtype"] + + def setncattr(self, *args, **kwargs) -> None: + """ + Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. + + Only defined explicitly in order to get some mocks to work. + """ + with _GLOBAL_NETCDF4_LOCK: + return self._contained_instance.setncattr(*args, **kwargs) + + @property + def dimensions(self) -> typing.List[str]: + """ + Calls netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK. + + Only defined explicitly in order to get some mocks to work. + """ + with _GLOBAL_NETCDF4_LOCK: + # Return value is a list of strings so no need for + # DimensionWrapper, unlike self.get_dims(). + return self._contained_instance.dimensions + + # All Variable API that returns Dimension(s) is wrapped to instead return + # DimensionWrapper(s). + + def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: + """ + Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. + + The original returned netCDF4.Dimensions are simply replaced with their + respective DimensionWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + dimensions_ = list( + self._contained_instance.get_dims(*args, **kwargs) + ) + return tuple([DimensionWrapper.from_existing(d) for d in dimensions_]) + + +class GroupWrapper(_ThreadSafeWrapper): + """ + Accessor for a netCDF4.Group, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Group. + """ + + CONTAINED_CLASS = netCDF4.Group + # Note: will also accept a whole Dataset object, but that is OK. + _DUCKTYPE_CHECK_PROPERTIES = ["createVariable"] + + # All Group API that returns Dimension(s) is wrapped to instead return + # DimensionWrapper(s). 
+ + @property + def dimensions(self) -> typing.Dict[str, DimensionWrapper]: + """ + Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. + + The original returned netCDF4.Dimensions are simply replaced with their + respective DimensionWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + dimensions_ = self._contained_instance.dimensions + return { + k: DimensionWrapper.from_existing(v) + for k, v in dimensions_.items() + } + + def createDimension(self, *args, **kwargs) -> DimensionWrapper: + """ + Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. + + The original returned netCDF4.Dimension is simply replaced with its + respective DimensionWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + new_dimension = self._contained_instance.createDimension( + *args, **kwargs + ) + return DimensionWrapper.from_existing(new_dimension) + + # All Group API that returns Variable(s) is wrapped to instead return + # VariableWrapper(s). + + @property + def variables(self) -> typing.Dict[str, VariableWrapper]: + """ + Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + + The original returned netCDF4.Variables are simply replaced with their + respective VariableWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + variables_ = self._contained_instance.variables + return { + k: VariableWrapper.from_existing(v) for k, v in variables_.items() + } + + def createVariable(self, *args, **kwargs) -> VariableWrapper: + """ + Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. + + The original returned netCDF4.Variable is simply replaced with its + respective VariableWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + new_variable = self._contained_instance.createVariable( + *args, **kwargs + ) + return VariableWrapper.from_existing(new_variable) + + def get_variables_by_attributes( + self, *args, **kwargs + ) -> typing.List[VariableWrapper]: + """ + Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + + The original returned netCDF4.Variables are simply replaced with their + respective VariableWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + variables_ = list( + self._contained_instance.get_variables_by_attributes( + *args, **kwargs + ) + ) + return [VariableWrapper.from_existing(v) for v in variables_] + + # All Group API that returns Group(s) is wrapped to instead return + # GroupWrapper(s). + + @property + def groups(self): + """ + Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers. + + The original returned netCDF4.Groups are simply replaced with their + respective GroupWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. 
+ """ + with _GLOBAL_NETCDF4_LOCK: + groups_ = self._contained_instance.groups + return {k: GroupWrapper.from_existing(v) for k, v in groups_.items()} + + @property + def parent(self): + """ + Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper. + + The original returned netCDF4.Group is simply replaced with its + respective GroupWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + parent_ = self._contained_instance.parent + return GroupWrapper.from_existing(parent_) + + def createGroup(self, *args, **kwargs): + """ + Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. + + The original returned netCDF4.Group is simply replaced with its + respective GroupWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + new_group = self._contained_instance.createGroup(*args, **kwargs) + return GroupWrapper.from_existing(new_group) + + +class DatasetWrapper(GroupWrapper): + """ + Accessor for a netCDF4.Dataset, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Dataset. + """ + + CONTAINED_CLASS = netCDF4.Dataset + # Note: 'close' exists on Dataset but not Group (though a rather weak distinction). + _DUCKTYPE_CHECK_PROPERTIES = ["createVariable", "close"] + + @classmethod + def fromcdl(cls, *args, **kwargs): + """ + Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper. + + The original returned netCDF4.Dataset is simply replaced with its + respective DatasetWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs) + return cls.from_existing(instance) + + +class NetCDFDataProxy: + """A reference to the data payload of a single NetCDF file variable.""" + + __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") + + def __init__(self, shape, dtype, path, variable_name, fill_value): + self.shape = shape + self.dtype = dtype + self.path = path + self.variable_name = variable_name + self.fill_value = fill_value + + @property + def ndim(self): + return len(self.shape) + + def __getitem__(self, keys): + # Using a DatasetWrapper causes problems with invalid ID's and the + # netCDF4 library, presumably because __getitem__ gets called so many + # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. + with _GLOBAL_NETCDF4_LOCK: + dataset = netCDF4.Dataset(self.path) + try: + variable = dataset.variables[self.variable_name] + # Get the NetCDF variable data and slice. + var = variable[keys] + finally: + dataset.close() + return np.asanyarray(var) + + def __repr__(self): + fmt = ( + "<{self.__class__.__name__} shape={self.shape}" + " dtype={self.dtype!r} path={self.path!r}" + " variable_name={self.variable_name!r}>" + ) + return fmt.format(self=self) + + def __getstate__(self): + return {attr: getattr(self, attr) for attr in self.__slots__} + + def __setstate__(self, state): + for key, value in state.items(): + setattr(self, key, value) + + +class NetCDFWriteProxy: + """ + The "opposite" of a NetCDFDataProxy : An object mimicking the data access of a + netCDF4.Variable, but where the data is to be ***written to***. + + It encapsulates the netcdf file and variable which are actually to be written to. 
+ This opens the file each time, to enable writing the data chunk, then closes it. + TODO: could be improved with a caching scheme, but this just about works. + """ + + def __init__(self, filepath, cf_var, file_write_lock): + self.path = filepath + self.varname = cf_var.name + self.lock = file_write_lock + + def __setitem__(self, keys, array_data): + # Write to the variable. + # First acquire a file-specific lock for all workers writing to this file. + self.lock.acquire() + # Open the file for writing + write to the specific file variable. + # Exactly as above, in NetCDFDataProxy : a DatasetWrapper causes problems with + # invalid ID's and the netCDF4 library, for so-far unknown reasons. + # Instead, use _GLOBAL_NETCDF4_LOCK, and netCDF4 _directly_. + with _GLOBAL_NETCDF4_LOCK: + dataset = None + try: + dataset = netCDF4.Dataset(self.path, "r+") + var = dataset.variables[self.varname] + var[keys] = array_data + finally: + try: + if dataset: + dataset.close() + finally: + # *ALWAYS* let go ! + self.lock.release() + + def __repr__(self): + return f"<{self.__class__.__name__} path={self.path!r} var={self.varname!r}>" diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 95f394c70d..20d255ea44 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -13,9 +13,9 @@ Also : `CF Conventions `_. """ +from collections.abc import Iterable import warnings -import netCDF4 import numpy as np from iris._lazy_data import as_lazy_data @@ -34,6 +34,7 @@ import iris.coords import iris.exceptions import iris.fileformats.cf +from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.netcdf.saver import _CF_ATTRS import iris.io import iris.util @@ -44,6 +45,10 @@ # Get the logger : shared logger for all in 'iris.fileformats.netcdf'. from . import logger +# An expected part of the public loader API, but includes thread safety +# concerns so is housed in _thread_safe_nc. +NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy + def _actions_engine(): # Return an 'actions engine', which provides a pyke-rules-like interface to @@ -55,48 +60,6 @@ def _actions_engine(): return engine -class NetCDFDataProxy: - """A reference to the data payload of a single NetCDF file variable.""" - - __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") - - def __init__(self, shape, dtype, path, variable_name, fill_value): - self.shape = shape - self.dtype = dtype - self.path = path - self.variable_name = variable_name - self.fill_value = fill_value - - @property - def ndim(self): - return len(self.shape) - - def __getitem__(self, keys): - dataset = netCDF4.Dataset(self.path) - try: - variable = dataset.variables[self.variable_name] - # Get the NetCDF variable data and slice. - var = variable[keys] - finally: - dataset.close() - return np.asanyarray(var) - - def __repr__(self): - fmt = ( - "<{self.__class__.__name__} shape={self.shape}" - " dtype={self.dtype!r} path={self.path!r}" - " variable_name={self.variable_name!r}>" - ) - return fmt.format(self=self) - - def __getstate__(self): - return {attr: getattr(self, attr) for attr in self.__slots__} - - def __setstate__(self, state): - for key, value in state.items(): - setattr(self, key, value) - - def _assert_case_specific_facts(engine, cf, cf_group): # Initialise a data store for built cube elements. 
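The essential discipline in NetCDFWriteProxy is the paired locking: a per-file lock guards the whole open/write/close cycle against other dask workers, and it is always released, even when the write fails. A toy version of that shape (an in-memory target stands in for the real open/write/close of a netCDF file):

    import threading

    import numpy as np

    class WriteProxy:
        """Write slices into a shared target, one writer at a time."""

        def __init__(self, target, lock):
            self.target = target
            self.lock = lock

        def __setitem__(self, keys, values):
            self.lock.acquire()
            try:
                # A real proxy opens the file here, writes the slice, then closes.
                self.target[keys] = values
            finally:
                # Always release, even if the write raised.
                self.lock.release()

    shared = np.zeros(6)
    proxy = WriteProxy(shared, threading.Lock())
    proxy[2:4] = [7, 7]
    print(shared)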
# This is used to patch element attributes *not* setup by the actions @@ -211,26 +174,61 @@ def _get_actual_dtype(cf_var): return dummy_data.dtype +# An arbitrary variable array size, below which we will fetch real data from a variable +# rather than making a lazy array for deferred access. +# Set by experiment at roughly the point where it begins to save us memory, but actually +# mostly done for speed improvement. See https://github.com/SciTools/iris/pull/5069 +_LAZYVAR_MIN_BYTES = 5000 + + def _get_cf_var_data(cf_var, filename): - # Get lazy chunked data out of a cf variable. - dtype = _get_actual_dtype(cf_var) - - # Create cube with deferred data, but no metadata - fill_value = getattr( - cf_var.cf_data, - "_FillValue", - netCDF4.default_fillvals[cf_var.dtype.str[1:]], - ) - proxy = NetCDFDataProxy( - cf_var.shape, dtype, filename, cf_var.cf_name, fill_value - ) - # Get the chunking specified for the variable : this is either a shape, or - # maybe the string "contiguous". - chunks = cf_var.cf_data.chunking() - # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. - if chunks == "contiguous": - chunks = None - return as_lazy_data(proxy, chunks=chunks) + """ + Get an array representing the data of a CF variable. + + This is typically a lazy array based around a NetCDFDataProxy, but if the variable + is "sufficiently small", we instead fetch the data as a real (numpy) array. + The latter is especially valuable for scalar coordinates, which are otherwise + unnecessarily slow + wasteful of memory. + + """ + if hasattr(cf_var, "_data_array"): + # The variable is not an actual netCDF4 file variable, but an emulating + # object with an attached data array (either numpy or dask), which can be + # returned immediately as-is. This is used as a hook to translate data to/from + # netcdf data container objects in other packages, such as xarray. + # See https://github.com/SciTools/iris/issues/4994 "Xarray bridge". + result = cf_var._data_array + else: + total_bytes = cf_var.size * cf_var.dtype.itemsize + if total_bytes < _LAZYVAR_MIN_BYTES: + # Don't make a lazy array, as it will cost more memory AND more time to access. + # Instead fetch the data immediately, as a real array, and return that. + result = cf_var[:] + + else: + # Get lazy chunked data out of a cf variable. + dtype = _get_actual_dtype(cf_var) + + # Make a data-proxy that mimics array access and can fetch from the file. + fill_value = getattr( + cf_var.cf_data, + "_FillValue", + _thread_safe_nc.default_fillvals[cf_var.dtype.str[1:]], + ) + proxy = NetCDFDataProxy( + cf_var.shape, dtype, filename, cf_var.cf_name, fill_value + ) + # Get the chunking specified for the variable : this is either a shape, or + # maybe the string "contiguous". + chunks = cf_var.cf_data.chunking() + # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. + if chunks == "contiguous": + chunks = None + + # Return a dask array providing deferred access. + result = as_lazy_data(proxy, chunks=chunks) + + return result class _OrderedAddableList(list): @@ -494,14 +492,15 @@ def inner(cf_datavar): return result -def load_cubes(filenames, callback=None, constraints=None): +def load_cubes(file_sources, callback=None, constraints=None): """ Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. Args: - * filenames (string/list): + * file_sources (string/list): One or more NetCDF filenames/OPeNDAP URLs to load from. + OR open datasets. Kwargs: @@ -529,66 +528,69 @@ def load_cubes(filenames, callback=None, constraints=None): # Create an actions engine. 
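The new _get_cf_var_data logic that follows boils down to a simple size test: variables below a byte threshold are read immediately as real arrays (cheap, and much faster for scalar coordinates), while everything else becomes a lazy array backed by a proxy. Roughly (threshold and names here are illustrative):

    import dask.array as da
    import numpy as np

    MIN_LAZY_BYTES = 5000   # illustrative; Iris uses a similar byte-count heuristic

    def real_or_lazy(var):
        """Return real data for small variables, a lazy array otherwise."""
        if var.size * var.dtype.itemsize < MIN_LAZY_BYTES:
            return var[:]                        # fetch now: avoids lazy overhead
        return da.from_array(var, chunks="auto")

    print(type(real_or_lazy(np.arange(10))))        # small  -> numpy.ndarray
    print(type(real_or_lazy(np.arange(100_000))))   # large  -> dask.array.Array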
engine = _actions_engine() - if isinstance(filenames, str): - filenames = [filenames] + if isinstance(file_sources, str) or not isinstance(file_sources, Iterable): + file_sources = [file_sources] - for filename in filenames: - # Ingest the netCDF file. + for file_source in file_sources: + # Ingest the file. At present may be a filepath or an open netCDF4.Dataset. meshes = {} if PARSE_UGRID_ON_LOAD: - cf = CFUGridReader(filename) - meshes = _meshes_from_cf(cf) + cf_reader_class = CFUGridReader else: - cf = iris.fileformats.cf.CFReader(filename) + cf_reader_class = iris.fileformats.cf.CFReader - # Process each CF data variable. - data_variables = list(cf.cf_group.data_variables.values()) + list( - cf.cf_group.promoted.values() - ) - for cf_var in data_variables: - if var_callback and not var_callback(cf_var): - # Deliver only selected results. - continue - - # cf_var-specific mesh handling, if a mesh is present. - # Build the mesh_coords *before* loading the cube - avoids - # mesh-related attributes being picked up by - # _add_unused_attributes(). - mesh_name = None - mesh = None - mesh_coords, mesh_dim = [], None + with cf_reader_class(file_source) as cf: if PARSE_UGRID_ON_LOAD: - mesh_name = getattr(cf_var, "mesh", None) - if mesh_name is not None: + meshes = _meshes_from_cf(cf) + + # Process each CF data variable. + data_variables = list(cf.cf_group.data_variables.values()) + list( + cf.cf_group.promoted.values() + ) + for cf_var in data_variables: + if var_callback and not var_callback(cf_var): + # Deliver only selected results. + continue + + # cf_var-specific mesh handling, if a mesh is present. + # Build the mesh_coords *before* loading the cube - avoids + # mesh-related attributes being picked up by + # _add_unused_attributes(). + mesh_name = None + mesh = None + mesh_coords, mesh_dim = [], None + if PARSE_UGRID_ON_LOAD: + mesh_name = getattr(cf_var, "mesh", None) + if mesh_name is not None: + try: + mesh = meshes[mesh_name] + except KeyError: + message = ( + f"File does not contain mesh: '{mesh_name}' - " + f"referenced by variable: '{cf_var.cf_name}' ." + ) + logger.debug(message) + if mesh is not None: + mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var) + + cube = _load_cube(engine, cf, cf_var, cf.filename) + + # Attach the mesh (if present) to the cube. + for mesh_coord in mesh_coords: + cube.add_aux_coord(mesh_coord, mesh_dim) + + # Process any associated formula terms and attach + # the corresponding AuxCoordFactory. try: - mesh = meshes[mesh_name] - except KeyError: - message = ( - f"File does not contain mesh: '{mesh_name}' - " - f"referenced by variable: '{cf_var.cf_name}' ." - ) - logger.debug(message) - if mesh is not None: - mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var) - - cube = _load_cube(engine, cf, cf_var, filename) - - # Attach the mesh (if present) to the cube. - for mesh_coord in mesh_coords: - cube.add_aux_coord(mesh_coord, mesh_dim) - - # Process any associated formula terms and attach - # the corresponding AuxCoordFactory. - try: - _load_aux_factory(engine, cube) - except ValueError as e: - warnings.warn("{}".format(e)) - - # Perform any user registered callback function. - cube = run_callback(callback, cube, cf_var, filename) - - # Callback mechanism may return None, which must not be yielded - if cube is None: - continue - - yield cube + _load_aux_factory(engine, cube) + except ValueError as e: + warnings.warn("{}".format(e)) + + # Perform any user registered callback function. 
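load_cubes now accepts either filenames/URLs or already-open dataset objects; the only up-front normalisation needed is to turn a single string, or a single non-iterable object, into a one-element list, as in this small stand-alone sketch (function name is illustrative):

    from collections.abc import Iterable

    def normalise_sources(file_sources):
        """Mimic the loader's handling of single vs multiple file sources."""
        if isinstance(file_sources, str) or not isinstance(file_sources, Iterable):
            file_sources = [file_sources]
        return list(file_sources)

    print(normalise_sources("air.nc"))            # ['air.nc']
    print(normalise_sources(["a.nc", "b.nc"]))    # ['a.nc', 'b.nc']
    print(normalise_sources(object()))            # [<object object at 0x...>]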
+ cube = run_callback(callback, cube, cf_var, file_source) + + # Callback mechanism may return None, which must not be yielded + if cube is None: + continue + + yield cube diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 650c5e3338..312eea9c43 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -23,10 +23,10 @@ import warnings import cf_units +import dask import dask.array as da -import netCDF4 +from dask.delayed import Delayed import numpy as np -import numpy.ma as ma from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data from iris.aux_factory import ( @@ -45,6 +45,7 @@ from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.exceptions import iris.fileformats.cf +from iris.fileformats.netcdf import _dask_locks, _thread_safe_nc import iris.io import iris.util @@ -156,207 +157,6 @@ } -# Cell methods. -_CM_KNOWN_METHODS = [ - "point", - "sum", - "mean", - "maximum", - "minimum", - "mid_range", - "standard_deviation", - "variance", - "mode", - "median", -] - -_CM_COMMENT = "comment" -_CM_EXTRA = "extra" -_CM_INTERVAL = "interval" -_CM_METHOD = "method" -_CM_NAME = "name" -_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+") -_CM_PARSE = re.compile( - r""" - (?P([\w_]+\s*?:\s+)+) - (?P[\w_\s]+(?![\w_]*\s*?:))\s* - (?: - \(\s* - (?P.+) - \)\s* - )? - """, - re.VERBOSE, -) - - -class UnknownCellMethodWarning(Warning): - pass - - -def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: - """ - Split a CF cell_methods attribute string into a list of zero or more cell - methods, each of which is then parsed with a regex to return a list of match - objects. - - Args: - - * nc_cell_methods: The value of the cell methods attribute to be split. - - Returns: - - * nc_cell_methods_matches: A list of the re.Match objects associated with - each parsed cell method - - Splitting is done based on words followed by colons outside of any brackets. - Validation of anything other than being laid out in the expected format is - left to the calling function. 
- """ - - # Find name candidates - name_start_inds = [] - for m in _CM_PARSE_NAME.finditer(nc_cell_methods): - name_start_inds.append(m.start()) - - # Remove those that fall inside brackets - bracket_depth = 0 - for ind, cha in enumerate(nc_cell_methods): - if cha == "(": - bracket_depth += 1 - elif cha == ")": - bracket_depth -= 1 - if bracket_depth < 0: - msg = ( - "Cell methods may be incorrectly parsed due to mismatched " - "brackets" - ) - warnings.warn(msg, UserWarning, stacklevel=2) - if bracket_depth > 0 and ind in name_start_inds: - name_start_inds.remove(ind) - - # List tuples of indices of starts and ends of the cell methods in the string - method_indices = [] - for ii in range(len(name_start_inds) - 1): - method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) - method_indices.append((name_start_inds[-1], len(nc_cell_methods))) - - # Index the string and match against each substring - nc_cell_methods_matches = [] - for start_ind, end_ind in method_indices: - nc_cell_method_str = nc_cell_methods[start_ind:end_ind] - nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) - if not nc_cell_method_match: - msg = ( - f"Failed to fully parse cell method string: {nc_cell_methods}" - ) - warnings.warn(msg, UserWarning, stacklevel=2) - continue - nc_cell_methods_matches.append(nc_cell_method_match) - - return nc_cell_methods_matches - - -def parse_cell_methods(nc_cell_methods): - """ - Parse a CF cell_methods attribute string into a tuple of zero or - more CellMethod instances. - - Args: - - * nc_cell_methods (str): - The value of the cell methods attribute to be parsed. - - Returns: - - * cell_methods - An iterable of :class:`iris.coords.CellMethod`. - - Multiple coordinates, intervals and comments are supported. - If a method has a non-standard name a warning will be issued, but the - results are not affected. - - """ - - cell_methods = [] - if nc_cell_methods is not None: - for m in _split_cell_methods(nc_cell_methods): - d = m.groupdict() - method = d[_CM_METHOD] - method = method.strip() - # Check validity of method, allowing for multi-part methods - # e.g. mean over years. 
- method_words = method.split() - if method_words[0].lower() not in _CM_KNOWN_METHODS: - msg = "NetCDF variable contains unknown cell method {!r}" - warnings.warn( - msg.format("{}".format(method_words[0])), - UnknownCellMethodWarning, - ) - d[_CM_METHOD] = method - name = d[_CM_NAME] - name = name.replace(" ", "") - name = name.rstrip(":") - d[_CM_NAME] = tuple([n for n in name.split(":")]) - interval = [] - comment = [] - if d[_CM_EXTRA] is not None: - # - # tokenise the key words and field colon marker - # - d[_CM_EXTRA] = d[_CM_EXTRA].replace( - "comment:", "<><<:>>" - ) - d[_CM_EXTRA] = d[_CM_EXTRA].replace( - "interval:", "<><<:>>" - ) - d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>") - if len(d[_CM_EXTRA]) == 1: - comment.extend(d[_CM_EXTRA]) - else: - next_field_type = comment - for field in d[_CM_EXTRA]: - field_type = next_field_type - index = field.rfind("<>") - if index == 0: - next_field_type = interval - continue - elif index > 0: - next_field_type = interval - else: - index = field.rfind("<>") - if index == 0: - next_field_type = comment - continue - elif index > 0: - next_field_type = comment - if index != -1: - field = field[:index] - field_type.append(field.strip()) - # - # cater for a shared interval over multiple axes - # - if len(interval): - if len(d[_CM_NAME]) != len(interval) and len(interval) == 1: - interval = interval * len(d[_CM_NAME]) - # - # cater for a shared comment over multiple axes - # - if len(comment): - if len(d[_CM_NAME]) != len(comment) and len(comment) == 1: - comment = comment * len(d[_CM_NAME]) - d[_CM_INTERVAL] = tuple(interval) - d[_CM_COMMENT] = tuple(comment) - cell_method = iris.coords.CellMethod( - d[_CM_METHOD], - coords=d[_CM_NAME], - intervals=d[_CM_INTERVAL], - comments=d[_CM_COMMENT], - ) - cell_methods.append(cell_method) - return tuple(cell_methods) - - class CFNameCoordMap: """Provide a simple CF name to CF coordinate mapping.""" @@ -459,63 +259,159 @@ def _setncattr(variable, name, attribute): Put the given attribute on the given netCDF4 Data type, casting attributes as we go to bytes rather than unicode. + NOTE: variable needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. + """ + assert hasattr(variable, "THREAD_SAFE_FLAG") attribute = _bytes_if_ascii(attribute) return variable.setncattr(name, attribute) -class _FillValueMaskCheckAndStoreTarget: +# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, +# but in the preferred order for coord/connectivity variables in the file. +MESH_ELEMENTS = ("node", "edge", "face") + + +_FillvalueCheckInfo = collections.namedtuple( + "_FillvalueCheckInfo", ["user_value", "check_value", "dtype", "varname"] +) + + +def _data_fillvalue_check(arraylib, data, check_value): """ - To be used with da.store. Remembers whether any element was equal to a - given value and whether it was masked, before passing the chunk to the - given target. + Check whether an array is masked, and whether it contains a fill-value. + + Parameters + ---------- + arraylib : module + Either numpy or dask.array : When dask, results are lazy computations. + data : array-like + Array to check (numpy or dask) + check_value : number or None + If not None, fill-value to check for existence in the array. + If None, do not do value-in-array check + + Returns + ------- + is_masked : bool + True if array has any masked points. + contains_value : bool + True if array contains check_value. + Always False if check_value is None. 
""" + is_masked = arraylib.any(arraylib.ma.getmaskarray(data)) + if check_value is None: + contains_value = False + else: + contains_value = arraylib.any(data == check_value) + return is_masked, contains_value - def __init__(self, target, fill_value=None): - self.target = target - self.fill_value = fill_value - self.contains_value = False - self.is_masked = False - def __setitem__(self, keys, arr): - if self.fill_value is not None: - self.contains_value = self.contains_value or self.fill_value in arr - self.is_masked = self.is_masked or ma.is_masked(arr) - self.target[keys] = arr +class SaverFillValueWarning(UserWarning): + pass -# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, -# but in the preferred order for coord/connectivity variables in the file. -MESH_ELEMENTS = ("node", "edge", "face") +def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): + """ + From the given information, work out whether there was a possible or actual + fill-value collision, and if so construct a warning. + + Parameters + ---------- + fill_info : _FillvalueCheckInfo + A named-tuple containing the context of the fill-value check + is_masked : bool + whether the data array was masked + contains_fill_value : bool + whether the data array contained the fill-value + warn : bool + if True, also issue any resulting warning immediately. + + Returns + ------- + None or :class:`Warning` + If not None, indicates a known or possible problem with filling + + """ + varname = fill_info.varname + user_value = fill_info.user_value + check_value = fill_info.check_value + is_byte_data = fill_info.dtype.itemsize == 1 + result = None + if is_byte_data and is_masked and user_value is None: + result = SaverFillValueWarning( + f"CF var '{varname}' contains byte data with masked points, but " + "no fill_value keyword was given. As saved, these " + "points will read back as valid values. To save as " + "masked byte data, `_FillValue` needs to be explicitly " + "set. For Cube data this can be done via the 'fill_value' " + "keyword during saving, otherwise use ncedit/equivalent." + ) + elif contains_fill_value: + result = SaverFillValueWarning( + f"CF var '{varname}' contains unmasked data points equal to the " + f"fill-value, {check_value}. As saved, these points will read back " + "as missing data. To save these as normal values, " + "`_FillValue` needs to be set to not equal any valid data " + "points. For Cube data this can be done via the 'fill_value' " + "keyword during saving, otherwise use ncedit/equivalent." + ) + + if warn and result is not None: + warnings.warn(result) + return result class Saver: """A manager for saving netcdf files.""" - def __init__(self, filename, netcdf_format): + def __init__(self, filename, netcdf_format, compute=True): """ A manager for saving netcdf files. - Args: - - * filename (string): + Parameters + ---------- + filename : string or netCDF4.Dataset Name of the netCDF file to save the cube. + OR a writeable object supporting the :class:`netCF4.Dataset` api. - * netcdf_format (string): + netcdf_format : string Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. - Returns: - None. - - For example:: + compute : bool, default=True + If ``True``, delayed variable saves will be completed on exit from the Saver + context (after first closing the target file), equivalent to + :meth:`complete()`. 
+ + If ``False``, the file is created and closed without writing the data of + variables for which the source data was lazy. These writes can be + completed later, see :meth:`delayed_completion`. + + .. Note:: + If ``filename`` is an open dataset, rather than a filepath, then the + caller must specify ``compute=False``, **close the dataset**, and + complete delayed saving afterwards. + If ``compute`` is ``True`` in this case, an error is raised. + This is because lazy content must be written by delayed save operations, + which will only succeed if the dataset can be (re-)opened for writing. + See :func:`save`. + + Returns + ------- + None + + Example + ------- + >>> import iris + >>> from iris.fileformats.netcdf.saver import Saver + >>> cubes = iris.load(iris.sample_data_path('atlantic_profiles.nc')) + >>> with Saver("tmp.nc", "NETCDF4") as sman: + ... # Iterate through the cubelist. + ... for cube in cubes: + ... sman.write(cube) - # Initialise Manager for saving - with Saver(filename, netcdf_format) as sman: - # Iterate through the cubelist. - for cube in cubes: - sman.write(cube) """ if netcdf_format not in [ @@ -542,30 +438,77 @@ def __init__(self, filename, netcdf_format): self._mesh_dims = {} #: A dictionary, mapping formula terms to owner cf variable name self._formula_terms_cache = {} - #: NetCDF dataset - try: - self._dataset = netCDF4.Dataset( - filename, mode="w", format=netcdf_format - ) - except RuntimeError: - dir_name = os.path.dirname(filename) - if not os.path.isdir(dir_name): - msg = "No such file or directory: {}".format(dir_name) - raise IOError(msg) - if not os.access(dir_name, os.R_OK | os.W_OK): - msg = "Permission denied: {}".format(filename) - raise IOError(msg) - else: - raise + #: Target filepath + self.filepath = ( + None # this line just for the API page -- value is set later + ) + #: Whether to complete delayed saves on exit (and raise associated warnings). + self.compute = compute + # N.B. the file-write-lock *type* actually depends on the dask scheduler type. + #: A per-file write lock to prevent dask attempting overlapping writes. + self.file_write_lock = ( + None # this line just for the API page -- value is set later + ) + + # A list of delayed writes for lazy saving + # a list of triples (source, target, fill-info). + self._delayed_writes = [] + + # Detect if we were passed a pre-opened dataset (or something like one) + self._to_open_dataset = hasattr(filename, "createVariable") + if self._to_open_dataset: + # We were passed a *dataset*, so we don't open (or close) one of our own. + self._dataset = filename + if compute: + msg = ( + "Cannot save to a user-provided dataset with 'compute=True'. " + "Please use 'compute=False' and complete delayed saving in the " + "calling code after the file is closed." + ) + raise ValueError(msg) + + # Put it inside a _thread_safe_nc wrapper to ensure thread-safety. + # Except if it already is one, since they forbid "re-wrapping". + if not hasattr(self._dataset, "THREAD_SAFE_FLAG"): + self._dataset = _thread_safe_nc.DatasetWrapper.from_existing( + self._dataset + ) + + # In this case the dataset gives a filepath, not the other way around. 
+ self.filepath = self._dataset.filepath() + + else: + # Given a filepath string/path : create a dataset from that + try: + self.filepath = os.path.abspath(filename) + self._dataset = _thread_safe_nc.DatasetWrapper( + self.filepath, mode="w", format=netcdf_format + ) + except RuntimeError: + dir_name = os.path.dirname(self.filepath) + if not os.path.isdir(dir_name): + msg = "No such file or directory: {}".format(dir_name) + raise IOError(msg) + if not os.access(dir_name, os.R_OK | os.W_OK): + msg = "Permission denied: {}".format(self.filepath) + raise IOError(msg) + else: + raise + + self.file_write_lock = _dask_locks.get_worker_lock(self.filepath) def __enter__(self): return self def __exit__(self, type, value, traceback): """Flush any buffered data to the CF-netCDF file before closing.""" - self._dataset.sync() - self._dataset.close() + if not self._to_open_dataset: + # Only close if the Saver created it. + self._dataset.close() + # Complete after closing, if required + if self.compute: + self.complete() def write( self, @@ -1030,7 +973,7 @@ def _add_inner_related_vars( """ if coordlike_elements: - # Choose the approriate parent attribute + # Choose the appropriate parent attribute elem_type = type(coordlike_elements[0]) if elem_type in (AuxCoord, DimCoord): role_attribute_name = "coordinates" @@ -1184,7 +1127,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): warnings.warn(msg) else: # Override `standard_name`, `long_name`, and `axis` of the - # primary coord that signals the presense of a dimensionless + # primary coord that signals the presence of a dimensionless # vertical coord, then set the `formula_terms` attribute. primary_coord = factory.dependencies[factory_defn.primary] if primary_coord in primaries: @@ -1491,7 +1434,7 @@ def cf_valid_var_name(var_name): A var_name suitable for passing through for variable creation. """ - # Replace invalid charaters with an underscore ("_"). + # Replace invalid characters with an underscore ("_"). var_name = re.sub(r"[^a-zA-Z0-9]", "_", var_name) # Ensure the variable name starts with a letter. if re.match(r"^[^a-zA-Z]", var_name): @@ -1927,16 +1870,6 @@ def _create_generic_cf_array_var( # Check if this is a dim-coord. is_dimcoord = cube is not None and element in cube.dim_coords - if isinstance(element, iris.coords.CellMeasure): - # Disallow saving of *masked* cell measures. - # NOTE: currently, this is the only functional difference in - # variable creation between an ancillary and a cell measure. - if iris.util.is_masked(data): - # We can't save masked points properly, as we don't maintain - # a fill_value. (Load will not record one, either). - msg = "Cell measures with missing data are not supported." - raise ValueError(msg) - if is_dimcoord: # By definition of a CF-netCDF coordinate variable this # coordinate must be 1-D and the name of the CF-netCDF variable @@ -2331,7 +2264,13 @@ def _create_cf_data_variable( dtype = data.dtype.newbyteorder("=") def set_packing_ncattrs(cfvar): - """Set netCDF packing attributes.""" + """ + Set netCDF packing attributes. + + NOTE: cfvar needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. 
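Putting the new constructor and exit behaviour together: when a caller hands the Saver an already-open dataset it must pass compute=False, close the dataset itself, and only then complete the delayed writes. A sketch of that workflow (file names are illustrative):

    import iris
    import netCDF4
    from iris.fileformats.netcdf.saver import Saver

    cubes = iris.load(iris.sample_data_path("atlantic_profiles.nc"))

    dataset = netCDF4.Dataset("target.nc", mode="w")
    with Saver(dataset, "NETCDF4", compute=False) as sman:
        for cube in cubes:
            sman.write(cube)

    dataset.close()                  # must close before completing delayed writes
    warnings_list = sman.complete()  # streams the lazy data, returns any warnings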
+ + """ + assert hasattr(cfvar, "THREAD_SAFE_FLAG") if packing: if scale_factor: _setncattr(cfvar, "scale_factor", scale_factor) @@ -2442,8 +2381,7 @@ def _increment_name(self, varname): return "{}_{}".format(varname, num) - @staticmethod - def _lazy_stream_data(data, fill_value, fill_warn, cf_var): + def _lazy_stream_data(self, data, fill_value, fill_warn, cf_var): if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: # (Don't do this check for string data). # Reduce dimensionality where the data array has an extra dimension @@ -2452,62 +2390,192 @@ def _lazy_stream_data(data, fill_value, fill_warn, cf_var): # contains just 1 row, so the cf_var is 1D. data = data.squeeze(axis=0) - if is_lazy_data(data): + if hasattr(cf_var, "_data_array"): + # The variable is not an actual netCDF4 file variable, but an emulating + # object with an attached data array (either numpy or dask), which should be + # copied immediately to the target. This is used as a hook to translate + # data to/from netcdf data container objects in other packages, such as + # xarray. + # See https://github.com/SciTools/iris/issues/4994 "Xarray bridge". + # N.B. also, in this case there is no need for fill-value checking as the + # data is not being translated to an in-file representation. + cf_var._data_array = data + else: + # Decide whether we are checking for fill-value collisions. + dtype = cf_var.dtype + # fill_warn allows us to skip warning if packing attributes have been + # specified. It would require much more complex operations to work out + # what the values and fill_value _would_ be in such a case. + if fill_warn: + if fill_value is not None: + fill_value_to_check = fill_value + else: + # Retain 'fill_value == None', to show that no specific value was given. + # But set 'fill_value_to_check' to a calculated value + fill_value_to_check = _thread_safe_nc.default_fillvals[ + dtype.str[1:] + ] + # Cast the check-value to the correct dtype. + # NOTE: In the case of 'S1' dtype (at least), the default (Python) value + # does not have a compatible type. This causes a deprecation warning at + # numpy 1.24, *and* was preventing correct fill-value checking of character + # data, since they are actually bytes (dtype 'S1'). + fill_value_to_check = np.array( + fill_value_to_check, dtype=dtype + ) + else: + # A None means we will NOT check for collisions. + fill_value_to_check = None + + fill_info = _FillvalueCheckInfo( + user_value=fill_value, + check_value=fill_value_to_check, + dtype=dtype, + varname=cf_var.name, + ) + + doing_delayed_save = is_lazy_data(data) + if doing_delayed_save: + # save lazy data with a delayed operation. For now, we just record the + # necessary information -- a single, complete delayed action is constructed + # later by a call to delayed_completion(). + def store(data, cf_var, fill_info): + # Create a data-writeable object that we can stream into, which + # encapsulates the file to be opened + variable to be written. + write_wrapper = _thread_safe_nc.NetCDFWriteProxy( + self.filepath, cf_var, self.file_write_lock + ) + # Add to the list of delayed writes, used in delayed_completion(). + self._delayed_writes.append( + (data, write_wrapper, fill_info) + ) + # In this case, fill-value checking is done later. But return 2 dummy + # values, to be consistent with the non-streamed "store" signature. 
+ is_masked, contains_value = False, False + return is_masked, contains_value - def store(data, cf_var, fill_value): - # Store lazy data and check whether it is masked and contains - # the fill value - target = _FillValueMaskCheckAndStoreTarget(cf_var, fill_value) - da.store([data], [target]) - return target.is_masked, target.contains_value + else: + # Real data is always written directly, i.e. not via lazy save. + # We also check it immediately for any fill-value problems. + def store(data, cf_var, fill_info): + cf_var[:] = data + return _data_fillvalue_check( + np, data, fill_info.check_value + ) - else: + # Store the data and check if it is masked and contains the fill value. + is_masked, contains_fill_value = store(data, cf_var, fill_info) - def store(data, cf_var, fill_value): - cf_var[:] = data - is_masked = np.ma.is_masked(data) - contains_value = fill_value is not None and fill_value in data - return is_masked, contains_value + if not doing_delayed_save: + # Issue a fill-value warning immediately, if appropriate. + _fillvalue_report( + fill_info, is_masked, contains_fill_value, warn=True + ) - dtype = cf_var.dtype + def delayed_completion(self) -> Delayed: + """ + Create and return a :class:`dask.delayed.Delayed` to perform file completion + for delayed saves. + + This contains all the delayed writes, which complete the file by filling out + the data of variables initially created empty, and also the checks for + potential fill-value collisions. + When computed, it returns a list of any warnings which were generated in the + save operation. + + Returns + ------- + completion : :class:`dask.delayed.Delayed` + + Notes + ----- + The dataset *must* be closed (saver has exited its context) before the + result can be computed, otherwise computation will hang (never return). + """ + if self._delayed_writes: + # Create a single delayed da.store operation to complete the file. + sources, targets, fill_infos = zip(*self._delayed_writes) + store_op = da.store(sources, targets, compute=False, lock=False) + + # Construct a delayed fill-check operation for each (lazy) source array. + delayed_fillvalue_checks = [ + # NB with arraylib=dask.array, this routine does lazy array computation + _data_fillvalue_check(da, source, fillinfo.check_value) + for source, fillinfo in zip(sources, fill_infos) + ] + + # Return a single delayed object which completes the delayed saves and + # returns a list of any fill-value warnings. + @dask.delayed + def compute_and_return_warnings(store_op, fv_infos, fv_checks): + # Note: we don't actually *do* anything with the 'store_op' argument, + # but including it here ensures that dask will compute it (thus + # performing all the delayed saves), before calling this function. + results = [] + # Pair each fill_check result (is_masked, contains_value) with its + # fillinfo and construct a suitable Warning if needed. + for fillinfo, (is_masked, contains_value) in zip( + fv_infos, fv_checks + ): + fv_warning = _fillvalue_report( + fill_info=fillinfo, + is_masked=is_masked, + contains_fill_value=contains_value, + ) + if fv_warning is not None: + # Collect the warnings and return them. + results.append(fv_warning) + return results + + result = compute_and_return_warnings( + store_op, + fv_infos=fill_infos, + fv_checks=delayed_fillvalue_checks, + ) - # fill_warn allows us to skip warning if packing attributes have been - # specified. It would require much more complex operations to work out - # what the values and fill_value _would_ be in such a case. 
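The delayed-completion machinery is standard dask: da.store with compute=False just records the writes and hands back a Delayed, which only touches the targets when computed. In miniature:

    import dask.array as da
    import numpy as np

    source = da.arange(10, chunks=5)
    target = np.zeros(10)

    store_op = da.store([source], [target], compute=False, lock=False)
    print(target[:3])        # still zeros: nothing written yet

    store_op.compute()       # the writes happen here
    print(target[:3])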
- if fill_warn: - if fill_value is not None: - fill_value_to_check = fill_value - else: - fill_value_to_check = netCDF4.default_fillvals[dtype.str[1:]] else: - fill_value_to_check = None + # Return a delayed, which returns an empty list, for usage consistency. + @dask.delayed + def no_op(): + return [] - # Store the data and check if it is masked and contains the fill value. - is_masked, contains_fill_value = store( - data, cf_var, fill_value_to_check - ) + result = no_op() - if dtype.itemsize == 1 and fill_value is None: - if is_masked: - msg = ( - "CF var '{}' contains byte data with masked points, but " - "no fill_value keyword was given. As saved, these " - "points will read back as valid values. To save as " - "masked byte data, `_FillValue` needs to be explicitly " - "set. For Cube data this can be done via the 'fill_value' " - "keyword during saving, otherwise use ncedit/equivalent." - ) - warnings.warn(msg.format(cf_var.name)) - elif contains_fill_value: + return result + + def complete(self, issue_warnings=True) -> List[Warning]: + """ + Complete file by computing any delayed variable saves. + + This requires that the Saver has closed the dataset (exited its context). + + Parameters + ---------- + issue_warnings : bool, default = True + If true, issue all the resulting warnings with :func:`warnings.warn`. + + Returns + ------- + warnings : list of Warning + Any warnings that were raised while writing delayed data. + + """ + if self._dataset.isopen(): msg = ( - "CF var '{}' contains unmasked data points equal to the " - "fill-value, {}. As saved, these points will read back " - "as missing data. To save these as normal values, " - "`_FillValue` needs to be set to not equal any valid data " - "points. For Cube data this can be done via the 'fill_value' " - "keyword during saving, otherwise use ncedit/equivalent." + "Cannot call Saver.complete() until its dataset is closed, " + "i.e. the saver's context has exited." ) - warnings.warn(msg.format(cf_var.name, fill_value)) + raise ValueError(msg) + + delayed_write = self.delayed_completion() + # Complete the saves now, and handle any delayed warnings that occurred + result_warnings = delayed_write.compute() + if issue_warnings: + # Issue any delayed warnings from the compute. + for delayed_warning in result_warnings: + warnings.warn(delayed_warning) + + return result_warnings def save( @@ -2526,6 +2594,7 @@ def save( least_significant_digit=None, packing=None, fill_value=None, + compute=True, ): """ Save cube(s) to a netCDF file, given the cube and the filename. @@ -2550,6 +2619,11 @@ def save( * filename (string): Name of the netCDF file to save the cube(s). + **Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object. + + .. Note:: + When saving to a dataset, ``compute`` **must** be ``False`` : + See the ``compute`` parameter. Kwargs: @@ -2648,8 +2722,34 @@ def save( `:class:`iris.cube.CubeList`, or a single element, and each element of this argument will be applied to each cube separately. + * compute (bool): + Default is ``True``, meaning complete the file immediately, and return ``None``. + + When ``False``, create the output file but don't write any lazy array content to + its variables, such as lazy cube data or aux-coord points and bounds. + Instead return a :class:`dask.delayed.Delayed` which, when computed, will + stream all the lazy content via :meth:`dask.store`, to complete the file. + Several such data saves can be performed in parallel, by passing a list of them + into a :func:`dask.compute` call. + + .. 
Note:: + when computed, the returned :class:`dask.delayed.Delayed` object returns + a list of :class:`Warning`\\s : These are any warnings which *would* have + been issued in the save call, if ``compute`` had been ``True``. + + .. Note:: + If saving to an open dataset instead of a filepath, then the caller + **must** specify ``compute=False``, and complete delayed saves **after + closing the dataset**. + This is because delayed saves may be performed in other processes : These + must (re-)open the dataset for writing, which will fail if the file is + still open for writing by the caller. + Returns: - None. + result (None, or dask.delayed.Delayed): + If `compute=True`, returns `None`. + Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed + writing to fill in the variables data. .. note:: @@ -2748,7 +2848,9 @@ def is_valid_packspec(p): raise ValueError(msg) # Initialise Manager for saving - with Saver(filename, netcdf_format) as sman: + # N.B. make the Saver compute=False, as we want control over creation of the + # delayed-completion object. + with Saver(filename, netcdf_format, compute=compute) as sman: # Iterate through the cubelist. for cube, packspec, fill_value in zip(cubes, packspecs, fill_values): sman.write( @@ -2793,3 +2895,12 @@ def is_valid_packspec(p): # Add conventions attribute. sman.update_global_attributes(Conventions=conventions) + + if compute: + # No more to do, since we used Saver(compute=True). + result = None + else: + # Return a delayed completion object. + result = sman.delayed_completion() + + return result diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index cff088cf89..ad0c6272ad 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1678,7 +1678,7 @@ def load(filename, read_data=False, little_ended=False): def _interpret_fields(fields): """ - Turn the fields read with load and FF2PP._extract_field into useable + Turn the fields read with load and FF2PP._extract_field into usable fields. One of the primary purposes of this function is to either convert "deferred bytes" into "deferred arrays" or "loaded bytes" into actual numpy arrays (via the _create_field_data) function. diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 51940b7c4d..707fd58757 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -404,15 +404,15 @@ def _load_pairs_from_fields_and_filenames( def load_pairs_from_fields(fields, converter): """ Convert an iterable of fields into an iterable of Cubes using the - provided convertor. + provided converter. Args: * fields: An iterable of fields. - * convertor: - An Iris convertor function, suitable for use with the supplied fields. + * converter: + An Iris converter function, suitable for use with the supplied fields. See the description in :class:`iris.fileformats.rules.Loader`. 
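At the top-level save() API this surfaces as the new compute keyword: the default writes everything immediately, while compute=False creates the file, defers the lazy writes, and returns a Delayed whose compute() performs them and returns any fill-value warnings. A sketch of the deferred form (output filename is illustrative):

    import iris

    cubes = iris.load(iris.sample_data_path("atlantic_profiles.nc"))

    # File is created and metadata written, but lazy variable data is not yet streamed.
    delayed = iris.save(cubes, "profiles_delayed.nc", compute=False)

    # ... later, possibly alongside other delayed saves in a single dask.compute() call:
    fill_warnings = delayed.compute()   # writes the data, returns a list of Warnings
    print(fill_warnings)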
Returns: diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index 01539960a5..b93b192bbd 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -88,7 +88,7 @@ 'm01s00i012': CFName('mass_fraction_of_cloud_ice_in_air', None, 'kg kg-1'), 'm01s00i013': CFName('convective_cloud_area_fraction', None, '1'), 'm01s00i020': CFName('soil_temperature', None, 'K'), - 'm01s00i023': CFName('snowfall_amount', None, 'kg m-2'), + 'm01s00i023': CFName('surface_snow_amount', None, 'kg m-2'), 'm01s00i024': CFName('surface_temperature', None, 'K'), 'm01s00i025': CFName('atmosphere_boundary_layer_thickness', None, 'm'), 'm01s00i026': CFName('surface_roughness_length', None, 'm'), @@ -1207,7 +1207,7 @@ CFName('sea_ice_thickness', None, 'm'): 687, CFName('sea_surface_elevation', None, 'm'): 608, CFName('snow_grain_size', None, '1e-6 m'): 1507, - CFName('snowfall_amount', None, 'kg m-2'): 93, + CFName('surface_snow_amount', None, 'kg m-2'): 93, CFName('snowfall_flux', None, 'kg m-2 s-1'): 108, CFName('soil_albedo', None, '1'): 1395, CFName('soil_carbon_content', None, 'kg m-2'): 1397, diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 7dd08c723c..4e5004ff10 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -94,6 +94,8 @@ def decode_uri(uri, default="file"): In addition to well-formed URIs, it also supports bare file paths as strings or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are accepted. + It also supports 'bare objects', i.e. anything which is not a string. + These are identified with a scheme of 'data', and returned unchanged. .. testsetup:: @@ -119,20 +121,31 @@ def decode_uri(uri, default="file"): >>> print(decode_uri('dataZoo/...')) ('file', 'dataZoo/...') + >>> print(decode_uri({})) + ('data', {}) + """ if isinstance(uri, pathlib.PurePath): uri = str(uri) - # make sure scheme has at least 2 letters to avoid windows drives - # put - last in the brackets so it refers to the character, not a range - # reference on valid schemes: http://tools.ietf.org/html/std66#section-3.1 - match = re.match(r"^([a-zA-Z][a-zA-Z0-9+.-]+):(.+)", uri) - if match: - scheme = match.group(1) - part = match.group(2) + + if isinstance(uri, str): + # make sure scheme has at least 2 letters to avoid windows drives + # put - last in the brackets so it refers to the character, not a range + # reference on valid schemes: http://tools.ietf.org/html/std66#section-3.1 + match = re.match(r"^([a-zA-Z][a-zA-Z0-9+.-]+):(.+)", uri) + if match: + scheme = match.group(1) + part = match.group(2) + else: + # Catch bare UNIX and Windows paths + scheme = default + part = uri else: - # Catch bare UNIX and Windows paths - scheme = default + # We can pass things other than strings, like open files. + # These are simply identified as 'data objects'. + scheme = "data" part = uri + return scheme, part @@ -216,7 +229,7 @@ def load_files(filenames, callback, constraints=None): ) handler_map[handling_format_spec].append(fn) - # Call each iris format handler with the approriate filenames + # Call each iris format handler with the appropriate filenames for handling_format_spec in sorted(handler_map): fnames = handler_map[handling_format_spec] if handling_format_spec.constraint_aware_handler: @@ -240,6 +253,13 @@ def load_http(urls, callback): intended interface for loading is :func:`iris.load`. """ + # + # NOTE: this routine is *also* called by "load_data_objects", in which case the + # 'urls' will actually be 'data objects'. 
+ # In principle, however, their scopes are different, so it's just an implementation + # detail that right now the same code will do for both. + # If that changes sometime, the two routines may go their separate ways. + # Create default dict mapping iris format handler to its associated filenames from iris.fileformats import FORMAT_AGENT @@ -255,6 +275,26 @@ def load_http(urls, callback): yield cube +def load_data_objects(urls, callback): + """ + Takes a list of data-source objects and a callback function, and returns a + generator of Cubes. + The 'objects' take the place of 'uris' in the load calls. + The appropriate types of the data-source objects are expected to be + recognised by the handlers : This is done in the usual way by passing the + context to the format picker to get a handler for each. + + .. note:: + + Typically, this function should not be called directly; instead, the + intended interface for loading is :func:`iris.load`. + + """ + # NOTE: this operation is currently *identical* to the http one. But it seems + # sensible to provide a distinct handler function for this scheme. + yield from load_http(urls, callback) + + def _dot_save(cube, target): # A simple wrapper for `iris.fileformats.dot.save` which allows the # saver to be registered without triggering the import of @@ -454,7 +494,7 @@ def save(source, target, saver=None, **kwargs): # Single cube? if isinstance(source, Cube): - saver(source, target, **kwargs) + result = saver(source, target, **kwargs) # CubeList or sequence of cubes? elif isinstance(source, CubeList) or ( @@ -477,9 +517,13 @@ def save(source, target, saver=None, **kwargs): if i != 0: kwargs["append"] = True saver(cube, target, **kwargs) + + result = None # Netcdf saver. else: - saver(source, target, **kwargs) + result = saver(source, target, **kwargs) else: raise ValueError("Cannot save; non Cube found in source") + + return result diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index a8e333c566..9def0ada98 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -331,3 +331,22 @@ def get_element(self, basename, file_handle): from iris.io import decode_uri return decode_uri(basename)[0] + + +class DataSourceObjectProtocol(FileElement): + """ + A :class:`FileElement` that simply returns the URI entry itself. + + This enables a arbitrary non-string data object to be passed, subject to + subsequent checks on the object itself (specified in the handler). + + """ + + def __init__(self): + super().__init__(requires_fh=False) + + def get_element(self, basename, file_handle): + # In this context, there should *not* be a file opened by the handler. + # Just return 'basename', which in this case is not a name, or even a + # string, but a passed 'data object'. + return basename diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index d6bac77d3b..cf16c9cbe6 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -233,7 +233,7 @@ def __init__(self, cubes, requested_dims_by_cube, ordered, coords_by_cube): break # If a coordinate with an equivalent definition (i.e. same # metadata) is not found in the master_dimensioned_coord_list, - # add the coords assocaited with the dimension to the list, + # add the coords associated with the dimension to the list, # add the size of the dimension to the master_dims_index and # store the offset. 
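decode_uri is the piece that lets bare objects flow through the load machinery: strings are still parsed for a scheme, while anything else is tagged with the 'data' scheme and routed to load_data_objects. For example:

    from iris.io import decode_uri

    print(decode_uri("air_temp.pp"))             # ('file', 'air_temp.pp')
    print(decode_uri("https://host/data.nc"))    # ('https', '//host/data.nc')
    print(decode_uri({}))                        # ('data', {})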
if not found: diff --git a/lib/iris/palette.py b/lib/iris/palette.py index a1c0a1e878..3ba17ffc97 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -57,7 +57,7 @@ def is_brewer(cmap): def _default_cmap_norm(args, kwargs): """ - This function injects default cmap and norm behavour into the keyword + This function injects default cmap and norm behaviour into the keyword arguments, based on the cube referenced within the positional arguments. """ cube = None diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 417b6b11de..4d6681e94e 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -159,6 +159,8 @@ def as_cube( as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD}) + Since this function converts to/from a Pandas object, laziness will not be preserved. + """ message = ( "iris.pandas.as_cube has been deprecated, and will be removed in a " @@ -170,7 +172,7 @@ def as_cube( if pandas_array.ndim not in [1, 2]: raise ValueError( "Only 1D or 2D Pandas arrays " - "can currently be conveted to Iris cubes." + "can currently be converted to Iris cubes." ) # Make the copy work consistently across NumPy 1.6 and 1.7. @@ -240,6 +242,8 @@ def as_cubes( :class:`dask.dataframe.DataFrame`\\ s are not supported. + Since this function converts to/from a Pandas object, laziness will not be preserved. + Examples -------- >>> from iris.pandas import as_cubes @@ -341,12 +345,13 @@ def as_cubes( ... var_name="longitude", ... value_name="air_temperature" ... ) + >>> my_df["longitude"] = my_df["longitude"].infer_objects() >>> print(my_df) - latitude longitude air_temperature - 0 35 0 300 - 1 25 0 301 - 2 35 10 302 - 3 25 10 303 + latitude longitude air_temperature + 0 35 0 300 + 1 25 0 301 + 2 35 10 302 + 3 25 10 303 >>> my_df = my_df.set_index(["latitude", "longitude"]) >>> my_df = my_df.sort_index() >>> converted_cube = as_cubes(my_df)[0] @@ -599,6 +604,10 @@ def as_series(cube, copy=True): If you have a large array that cannot be copied, make sure it is not masked and use copy=False. + Notes + ------ + Since this function converts to/from a Pandas object, laziness will not be preserved. + """ message = ( "iris.pandas.as_series has been deprecated, and will be removed in a " @@ -809,6 +818,10 @@ def as_data_frame( 419903 298.995148 Name: surface_temperature, Length: 419904, dtype: float32 + Notes + ------ + Since this function converts to/from a Pandas object, laziness will not be preserved. + """ def merge_metadata(meta_var_list): diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 8cd849b716..d319c1361b 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -904,7 +904,7 @@ def _replace_axes_with_cartopy_axes(cartopy_proj): ax = plt.gca() if not isinstance(ax, cartopy.mpl.geoaxes.GeoAxes): - fig = plt.gcf() + fig = ax.get_figure() if isinstance(ax, matplotlib.axes.SubplotBase): _ = fig.add_subplot( ax.get_subplotspec(), @@ -1112,6 +1112,11 @@ def contour(cube, *args, **kwargs): See :func:`matplotlib.pyplot.contour` for details of other valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ result = _draw_2d_from_points("contour", None, cube, *args, **kwargs) return result @@ -1136,6 +1141,11 @@ def contourf(cube, *args, **kwargs): See :func:`matplotlib.pyplot.contourf` for details of other valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. 
+ See more at :doc:`/userguide/real_and_lazy_data`. + """ coords = kwargs.get("coords") kwargs.setdefault("antialiased", True) @@ -1200,6 +1210,11 @@ def default_projection(cube): import matplotlib.pyplot as plt ax = plt.ax(projection=default_projection(cube)) + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # XXX logic seems flawed, but it is what map_setup did... cs = cube.coord_system("CoordSystem") @@ -1218,6 +1233,11 @@ def default_projection_extent(cube, mode=iris.coords.POINT_MODE): points, or the limits of the cell's bounds. The default is iris.coords.POINT_MODE. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ extents = cartography._xy_range(cube, mode) xlim = extents[0] @@ -1255,7 +1275,13 @@ def _fill_orography(cube, coords, mode, vert_plot, horiz_plot, style_args): def orography_at_bounds(cube, facecolor="#888888", coords=None, axes=None): - """Plots orography defined at cell boundaries from the given Cube.""" + """Plots orography defined at cell boundaries from the given Cube. + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # XXX Needs contiguous orography corners to work. raise NotImplementedError( @@ -1288,7 +1314,13 @@ def horiz_plot(v_coord, orography, style_args): def orography_at_points(cube, facecolor="#888888", coords=None, axes=None): - """Plots orography defined at sample points from the given Cube.""" + """Plots orography defined at sample points from the given Cube. + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ style_args = {"facecolor": facecolor} @@ -1334,6 +1366,11 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): The axes to use for drawing. Defaults to the current axes if none provided. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ result = _draw_2d_from_bounds( "pcolormesh", @@ -1376,6 +1413,11 @@ def pcolor(cube, *args, **kwargs): See :func:`matplotlib.pyplot.pcolor` for details of other valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ kwargs.setdefault("antialiased", True) kwargs.setdefault("snap", False) @@ -1410,6 +1452,11 @@ def pcolormesh(cube, *args, **kwargs): See :func:`matplotlib.pyplot.pcolormesh` for details of other valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ result = _draw_2d_from_bounds("pcolormesh", cube, *args, **kwargs) return result @@ -1435,6 +1482,11 @@ def points(cube, *args, **kwargs): See :func:`matplotlib.pyplot.scatter` for details of other valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ def _scatter_args(u, v, data, *args, **kwargs): @@ -1526,6 +1578,11 @@ def barbs(u_cube, v_cube, *args, **kwargs): See :func:`matplotlib.pyplot.barbs` for details of other valid keyword arguments. 
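The repeated "does not maintain laziness" notes are easy to demonstrate: touching a cube through a plot routine realises its data. For instance, using an Iris sample file:

    import iris
    import iris.quickplot as qplt

    cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))
    print(cube.has_lazy_data())   # True: nothing read from disk yet

    qplt.contourf(cube)           # plotting needs real values...
    print(cube.has_lazy_data())   # ...so the data has now been realised (False)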
+ Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # # TODO: check u + v cubes for compatibility. @@ -1576,6 +1633,11 @@ def quiver(u_cube, v_cube, *args, **kwargs): See :func:`matplotlib.pyplot.quiver` for details of other valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # # TODO: check u + v cubes for compatibility. @@ -1622,6 +1684,11 @@ def plot(*args, **kwargs): See :func:`matplotlib.pyplot.plot` for details of additional valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ if "coords" in kwargs: raise TypeError( @@ -1654,6 +1721,11 @@ def scatter(x, y, *args, **kwargs): See :func:`matplotlib.pyplot.scatter` for details of additional valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # here we are more specific about argument types than generic 1d plotting if not isinstance(x, (iris.cube.Cube, iris.coords.Coord)): @@ -1689,6 +1761,11 @@ def fill_between(x, y1, y2, *args, **kwargs): See :func:`matplotlib.pyplot.fill_between` for details of additional valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # here we are more specific about argument types than generic 1d plotting if not isinstance(x, (iris.cube.Cube, iris.coords.Coord)): @@ -1704,6 +1781,41 @@ def fill_between(x, y1, y2, *args, **kwargs): ) +def hist(x, *args, **kwargs): + """ + Compute and plot a histogram. + + Args: + + * x: + A :class:`~iris.cube.Cube`, :class:`~iris.coords.Coord`, + :class:`~iris.coords.CellMeasure`, or :class:`~iris.coords.AncillaryVariable` + whose values will be used to create the + histogram. + Note that if a coordinate is given, the points are used, ignoring the + bounds. + + See :func:`matplotlib.pyplot.hist` for details of additional valid + keyword arguments. + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ + if isinstance(x, iris.cube.Cube): + data = x.data + elif isinstance(x, iris.coords._DimensionalMetadata): + data = x._values + else: + raise TypeError( + "x must be a cube, coordinate, cell measure or " + "ancillary variable." + ) + return plt.hist(data, *args, **kwargs) + + # Provide convenience show method from pyplot show = plt.show @@ -1737,6 +1849,11 @@ def symbols(x, y, symbols, size, axes=None, units="inches"): * units: ['inches', 'points'] The unit for the symbol size. + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ if axes is None: axes = plt.gca() @@ -1862,6 +1979,11 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): >>> ani = iplt.animate(cube_iter, qplt.contourf) >>> iplt.show() + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`.
+ """ kwargs.setdefault("interval", 100) coords = kwargs.pop("coords", None) diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 6006314265..9209d4b3b7 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -174,6 +174,11 @@ def contour(cube, *args, **kwargs): See :func:`iris.plot.contour` for details of valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -201,6 +206,10 @@ def contourf(cube, *args, **kwargs): See :func:`iris.plot.contourf` for details of valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -229,6 +238,11 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): The width of the lines showing the cell outlines. If None, the default width in patch.linewidth in matplotlibrc is used. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ result = iplt.outline( cube, color=color, linewidth=linewidth, coords=coords, axes=axes @@ -244,6 +258,10 @@ def pcolor(cube, *args, **kwargs): See :func:`iris.plot.pcolor` for details of valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -258,6 +276,11 @@ def pcolormesh(cube, *args, **kwargs): See :func:`iris.plot.pcolormesh` for details of valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -272,6 +295,11 @@ def points(cube, *args, **kwargs): See :func:`iris.plot.points` for details of valid keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -288,6 +316,11 @@ def plot(*args, **kwargs): See :func:`iris.plot.plot` for details of valid arguments and keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ axes = kwargs.get("axes") result = iplt.plot(*args, **kwargs) @@ -303,6 +336,11 @@ def scatter(x, y, *args, **kwargs): See :func:`iris.plot.scatter` for details of valid arguments and keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ axes = kwargs.get("axes") result = iplt.scatter(x, y, *args, **kwargs) @@ -317,6 +355,10 @@ def fill_between(x, y1, y2, *args, **kwargs): See :func:`iris.plot.fill_between` for details of valid arguments and keyword arguments. + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. 
""" axes = kwargs.get("axes") result = iplt.fill_between(x, y1, y2, *args, **kwargs) @@ -324,5 +366,35 @@ def fill_between(x, y1, y2, *args, **kwargs): return result +def hist(x, *args, **kwargs): + """ + Compute and plot a labelled histogram. + + See :func:`iris.plot.hist` for details of valid arguments and + keyword arguments. + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ + axes = kwargs.get("axes") + result = iplt.hist(x, *args, **kwargs) + title = _title(x, with_units=False) + label = _title(x, with_units=True) + + if axes is None: + axes = plt.gca() + + orientation = kwargs.get("orientation") + if orientation == "horizontal": + axes.set_ylabel(label) + else: + axes.set_xlabel(label) + axes.set_title(title) + + return result + + # Provide a convenience show method from pyplot. show = plt.show diff --git a/lib/iris/tests/experimental/test_raster.py b/lib/iris/tests/experimental/test_raster.py index c654483bfd..ffd03e6f4d 100644 --- a/lib/iris/tests/experimental/test_raster.py +++ b/lib/iris/tests/experimental/test_raster.py @@ -42,7 +42,7 @@ def check_tiff_header(self, tiff_filename, expect_keys, expect_entries): def check_tiff(self, cube, header_keys, header_items): # Check that the cube saves correctly to TIFF : # * the header contains expected keys and (some) values - # * the data array retrives correctly + # * the data array retrieves correctly import iris.experimental.raster with self.temp_filename(".tif") as temp_filename: diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 544d989564..5ee555cb6e 100755 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -113,10 +113,10 @@ def write_repo_json(data: Dict[str, str]) -> None: string_data = {} for key, val in data.items(): string_data[key] = str(val) - with open(IMAGE_REPO_PATH, "wb") as fo: + with open(IMAGE_REPO_PATH, "wb") as fout: json.dump( string_data, - codecs.getwriter("utf-8")(fo), + codecs.getwriter("utf-8")(fout), indent=4, sort_keys=True, ) diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index 091ecd4378..1f39b2589d 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -16,13 +16,43 @@ import cf_units import numpy as np -from iris._concatenate import concatenate +from iris._concatenate import _DerivedCoordAndDims, concatenate +import iris.aux_factory import iris.coords import iris.cube import iris.tests.stock as stock from iris.util import unify_time_units +class Test_DerivedCoordAndDims: + def test_equal(self): + assert _DerivedCoordAndDims( + "coord", "dims", "aux_factory" + ) == _DerivedCoordAndDims("coord", "dims", "aux_factory") + + def test_non_equal_coord(self): + assert _DerivedCoordAndDims( + "coord_0", "dims", "aux_factory" + ) != _DerivedCoordAndDims("coord_1", "dims", "aux_factory") + + def test_non_equal_dims(self): + assert _DerivedCoordAndDims( + "coord", "dims_0", "aux_factory" + ) != _DerivedCoordAndDims("coord", "dims_1", "aux_factory") + + def test_non_equal_aux_factory(self): + # Note: aux factories are not taken into account for equality! 
+ assert _DerivedCoordAndDims( + "coord", "dims", "aux_factory_0" + ) == _DerivedCoordAndDims("coord", "dims", "aux_factory_1") + + def test_non_equal_types(self): + assert ( + _DerivedCoordAndDims("coord", "dims", "aux_factory") + != "I am not a _DerivedCoordAndDims" + ) + + class Test_concatenate__epoch(tests.IrisTest): def simple_1d_time_cubes(self, reftimes, coords_points): cubes = [] @@ -187,6 +217,127 @@ def test_ignore_diff_ancillary_variables(self): self.assertEqual(result[0].shape, (4, 2)) +class Test_cubes_with_derived_coord(tests.IrisTest): + def create_cube(self): + data = np.arange(4).reshape(2, 2) + aux_factories = [] + + # DimCoords + sigma = iris.coords.DimCoord([0.0, 10.0], var_name="sigma", units="1") + t_unit = cf_units.Unit( + "hours since 1970-01-01 00:00:00", calendar="standard" + ) + time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) + + # AtmosphereSigmaFactory (does not span concatenated dim) + ptop = iris.coords.AuxCoord(100.0, var_name="ptop", units="Pa") + surface_p = iris.coords.AuxCoord([1.0, 2.0], var_name="ps", units="Pa") + aux_factories.append( + iris.aux_factory.AtmosphereSigmaFactory(ptop, sigma, surface_p) + ) + + # HybridHeightFactory (spans concatenated dim) + delta = iris.coords.AuxCoord(10.0, var_name="delta", units="m") + orog = iris.coords.AuxCoord(data, var_name="orog", units="m") + aux_factories.append( + iris.aux_factory.HybridHeightFactory(delta, sigma, orog) + ) + + dim_coords_and_dims = [(time, 0), (sigma, 1)] + aux_coords_and_dims = [ + (ptop, ()), + (delta, ()), + (surface_p, 1), + (orog, (0, 1)), + ] + + cube = iris.cube.Cube( + data, + standard_name="air_temperature", + units="K", + dim_coords_and_dims=dim_coords_and_dims, + aux_coords_and_dims=aux_coords_and_dims, + aux_factories=aux_factories, + ) + return cube + + def test_equal_derived_coords(self): + cube_a = self.create_cube() + cube_b = cube_a.copy() + cube_b.coord("time").points = [12, 18] + + result = concatenate([cube_a, cube_b]) + self.assertEqual(len(result), 1) + self.assertEqual(result[0].shape, (4, 2)) + + np.testing.assert_allclose( + result[0].coord("air_pressure").points, [100.0, -880.0] + ) + np.testing.assert_allclose( + result[0].coord("altitude").points, + [[10.0, 20.0], [10.0, 40.0], [10.0, 20.0], [10.0, 40.0]], + ) + + def test_equal_derived_coords_with_bounds(self): + cube_a = self.create_cube() + cube_a.coord("sigma").bounds = [[0.0, 5.0], [5.0, 20.0]] + cube_b = cube_a.copy() + cube_b.coord("time").points = [12, 18] + + result = concatenate([cube_a, cube_b]) + self.assertEqual(len(result), 1) + self.assertEqual(result[0].shape, (4, 2)) + + np.testing.assert_allclose( + result[0].coord("air_pressure").bounds, + [[100.0, -395.0], [-390.0, -1860.0]], + ) + + def test_diff_altitude(self): + """Gives one cube since altitude spans concatenation dim.""" + cube_a = self.create_cube() + cube_b = cube_a.copy() + cube_b.coord("time").points = [12, 18] + cube_b.coord("orog").points = [[0, 0], [0, 0]] + + result = concatenate([cube_a, cube_b]) + self.assertEqual(len(result), 1) + self.assertEqual(result[0].shape, (4, 2)) + + np.testing.assert_allclose( + result[0].coord("altitude").points, + [[10.0, 20.0], [10.0, 40.0], [10.0, 10.0], [10.0, 10.0]], + ) + + def test_diff_air_pressure(self): + """Gives two cubes since air_pressure does not span concatenation dim.""" + cube_a = self.create_cube() + cube_b = cube_a.copy() + cube_b.coord("time").points = [12, 18] + cube_b.coord("ps").points = [10.0, 20.0] + + result = concatenate([cube_a, cube_b],
check_aux_coords=False) + self.assertEqual(len(result), 2) + + def test_ignore_diff_air_pressure(self): + cube_a = self.create_cube() + cube_b = cube_a.copy() + cube_b.coord("time").points = [12, 18] + cube_b.coord("ps").points = [10.0, 20.0] + + result = concatenate( + [cube_a, cube_b], + check_aux_coords=False, + check_derived_coords=False, + ) + self.assertEqual(len(result), 1) + self.assertEqual(result[0].shape, (4, 2)) + + np.testing.assert_allclose( + result[0].coord("air_pressure").points, [100.0, -880.0] + ) + + class Test_anonymous_dims(tests.IrisTest): def setUp(self): data = np.arange(12).reshape(2, 3, 2) diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index a510ef7257..318292615b 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -419,7 +419,7 @@ def test_load_raw(self): expected = CubeList(fldset_1 + fldset_2) else: # 'Raw' cubes have combined (vector) times within each file. - # The 'other' phenomenon appears seperately. + # The 'other' phenomenon appears separately. expected = CubeList( [ CubeList(fldset_1[:2]).merge_cube(), diff --git a/lib/iris/tests/runner/__init__.py b/lib/iris/tests/integration/netcdf/__init__.py similarity index 75% rename from lib/iris/tests/runner/__init__.py rename to lib/iris/tests/integration/netcdf/__init__.py index b561e1cf87..f500b52520 100644 --- a/lib/iris/tests/runner/__init__.py +++ b/lib/iris/tests/integration/netcdf/__init__.py @@ -3,7 +3,4 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -""" -Empty file to allow import. - -""" +"""Integration tests for loading and saving netcdf files.""" diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py new file mode 100644 index 0000000000..c41af1b356 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py @@ -0,0 +1,115 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` package. + +Note: these integration tests replace any unit testing of this module, due to its total +dependence on Dask, and even on Dask's implementation details rather than supported +and documented API and behaviour. +So (a) it is essential to check the module's behaviour against actual Dask operation, +and (b) mock-ist testing of the implementation code in isolation would not add anything +of much value. +""" +import dask +import dask.config +import distributed +import pytest + +from iris.fileformats.netcdf._dask_locks import ( + DaskSchedulerTypeError, + dask_scheduler_is_distributed, + get_dask_array_scheduler_type, + get_worker_lock, +) + + +@pytest.fixture( + params=[ + "UnspecifiedScheduler", + "ThreadedScheduler", + "SingleThreadScheduler", + "ProcessScheduler", + "DistributedScheduler", + ] +) +def dask_scheduler(request): + # Control Dask to enable a specific scheduler type. 
+ sched_typename = request.param + if sched_typename == "UnspecifiedScheduler": + config_name = None + elif sched_typename == "SingleThreadScheduler": + config_name = "single-threaded" + elif sched_typename == "ThreadedScheduler": + config_name = "threads" + elif sched_typename == "ProcessScheduler": + config_name = "processes" + else: + assert sched_typename == "DistributedScheduler" + config_name = "distributed" + + if config_name == "distributed": + _distributed_client = distributed.Client() + + if config_name is None: + context = None + else: + context = dask.config.set(scheduler=config_name) + context.__enter__() + + yield sched_typename + + if context: + context.__exit__(None, None, None) + + if config_name == "distributed": + _distributed_client.close() + + +def test_dask_scheduler_is_distributed(dask_scheduler): + result = dask_scheduler_is_distributed() + # Should return 'True' only with a distributed scheduler. + expected = dask_scheduler == "DistributedScheduler" + assert result == expected + + +def test_get_dask_array_scheduler_type(dask_scheduler): + result = get_dask_array_scheduler_type() + expected = { + "UnspecifiedScheduler": "threads", + "ThreadedScheduler": "threads", + "ProcessScheduler": "processes", + "SingleThreadScheduler": "single-threaded", + "DistributedScheduler": "distributed", + }[dask_scheduler] + assert result == expected + + +def test_get_worker_lock(dask_scheduler): + test_identity = "" + error = None + try: + result = get_worker_lock(test_identity) + except DaskSchedulerTypeError as err: + error = err + result = None + + if dask_scheduler == "ProcessScheduler": + assert result is None + assert isinstance(error, DaskSchedulerTypeError) + msg = 'scheduler type is "processes", which is not supported' + assert msg in error.args[0] + else: + assert error is None + assert result is not None + if dask_scheduler == "DistributedScheduler": + assert isinstance(result, distributed.Lock) + assert result.name == test_identity + else: + # low-level object doesn't have a readily available class for isinstance + assert all( + hasattr(result, att) + for att in ("acquire", "release", "locked") + ) diff --git a/lib/iris/tests/integration/netcdf/test_attributes.py b/lib/iris/tests/integration/netcdf/test_attributes.py new file mode 100644 index 0000000000..a73d6c7d49 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_attributes.py @@ -0,0 +1,119 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Integration tests for attribute-related loading and saving netcdf files.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip + +from contextlib import contextmanager +from unittest import mock + +import iris +from iris.cube import Cube, CubeList +from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION + + +class TestUmVersionAttribute(tests.IrisTest): + def test_single_saves_as_global(self): + cube = Cube( + [1.0], + standard_name="air_temperature", + units="K", + attributes={"um_version": "4.3"}, + ) + with self.temp_filename(".nc") as nc_path: + iris.save(cube, nc_path) + self.assertCDL(nc_path) + + def test_multiple_same_saves_as_global(self): + cube_a = Cube( + [1.0], + standard_name="air_temperature", + units="K", + attributes={"um_version": "4.3"}, + ) + cube_b = Cube( + [1.0], + standard_name="air_pressure", + units="hPa", + attributes={"um_version": "4.3"}, + ) + with self.temp_filename(".nc") as nc_path: + iris.save(CubeList([cube_a, cube_b]), nc_path) + self.assertCDL(nc_path) + + def test_multiple_different_saves_on_variables(self): + cube_a = Cube( + [1.0], + standard_name="air_temperature", + units="K", + attributes={"um_version": "4.3"}, + ) + cube_b = Cube( + [1.0], + standard_name="air_pressure", + units="hPa", + attributes={"um_version": "4.4"}, + ) + with self.temp_filename(".nc") as nc_path: + iris.save(CubeList([cube_a, cube_b]), nc_path) + self.assertCDL(nc_path) + + +@contextmanager +def _patch_site_configuration(): + def cf_patch_conventions(conventions): + return ", ".join([conventions, "convention1, convention2"]) + + def update(config): + config["cf_profile"] = mock.Mock(name="cf_profile") + config["cf_patch"] = mock.Mock(name="cf_patch") + config["cf_patch_conventions"] = cf_patch_conventions + + orig_site_config = iris.site_configuration.copy() + update(iris.site_configuration) + yield + iris.site_configuration = orig_site_config + + +class TestConventionsAttributes(tests.IrisTest): + def test_patching_conventions_attribute(self): + # Ensure that user defined conventions are wiped and those which are + # saved patched through site_config can be loaded without an exception + # being raised. + cube = Cube( + [1.0], + standard_name="air_temperature", + units="K", + attributes={"Conventions": "some user defined conventions"}, + ) + + # Patch the site configuration dictionary. + with _patch_site_configuration(), self.temp_filename(".nc") as nc_path: + iris.save(cube, nc_path) + res = iris.load_cube(nc_path) + + self.assertEqual( + res.attributes["Conventions"], + "{}, {}, {}".format( + CF_CONVENTIONS_VERSION, "convention1", "convention2" + ), + ) + + +class TestStandardName(tests.IrisTest): + def test_standard_name_roundtrip(self): + standard_name = "air_temperature detection_minimum" + cube = iris.cube.Cube(1, standard_name=standard_name) + with self.temp_filename(suffix=".nc") as fout: + iris.save(cube, fout) + detection_limit_cube = iris.load_cube(fout) + self.assertEqual(detection_limit_cube.standard_name, standard_name) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/netcdf/test_aux_factories.py b/lib/iris/tests/integration/netcdf/test_aux_factories.py new file mode 100644 index 0000000000..d89f275336 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_aux_factories.py @@ -0,0 +1,160 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+"""Integration tests for aux-factory-related loading and saving netcdf files.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +import iris +from iris.tests import stock as stock + + +@tests.skip_data +class TestAtmosphereSigma(tests.IrisTest): + def setUp(self): + # Modify stock cube so it is suitable to have a atmosphere sigma + # factory added to it. + cube = stock.realistic_4d_no_derived() + cube.coord("surface_altitude").rename("surface_air_pressure") + cube.coord("surface_air_pressure").units = "Pa" + cube.coord("sigma").units = "1" + ptop_coord = iris.coords.AuxCoord(1000.0, var_name="ptop", units="Pa") + cube.add_aux_coord(ptop_coord, ()) + cube.remove_coord("level_height") + # Construct and add atmosphere sigma factory. + factory = iris.aux_factory.AtmosphereSigmaFactory( + cube.coord("ptop"), + cube.coord("sigma"), + cube.coord("surface_air_pressure"), + ) + cube.add_aux_factory(factory) + self.cube = cube + + def test_save(self): + with self.temp_filename(suffix=".nc") as filename: + iris.save(self.cube, filename) + self.assertCDL(filename) + + def test_save_load_loop(self): + # Ensure that the AtmosphereSigmaFactory is automatically loaded + # when loading the file. + with self.temp_filename(suffix=".nc") as filename: + iris.save(self.cube, filename) + cube = iris.load_cube(filename, "air_potential_temperature") + assert cube.coords("air_pressure") + + +@tests.skip_data +class TestHybridPressure(tests.IrisTest): + def setUp(self): + # Modify stock cube so it is suitable to have a + # hybrid pressure factory added to it. + cube = stock.realistic_4d_no_derived() + cube.coord("surface_altitude").rename("surface_air_pressure") + cube.coord("surface_air_pressure").units = "Pa" + cube.coord("level_height").rename("level_pressure") + cube.coord("level_pressure").units = "Pa" + # Construct and add hybrid pressure factory. + factory = iris.aux_factory.HybridPressureFactory( + cube.coord("level_pressure"), + cube.coord("sigma"), + cube.coord("surface_air_pressure"), + ) + cube.add_aux_factory(factory) + self.cube = cube + + def test_save(self): + with self.temp_filename(suffix=".nc") as filename: + iris.save(self.cube, filename) + self.assertCDL(filename) + + def test_save_load_loop(self): + # Tests an issue where the variable names in the formula + # terms changed to the standard_names instead of the variable names + # when loading a previously saved cube. 
+ with self.temp_filename(suffix=".nc") as filename, self.temp_filename( + suffix=".nc" + ) as other_filename: + iris.save(self.cube, filename) + cube = iris.load_cube(filename, "air_potential_temperature") + iris.save(cube, other_filename) + other_cube = iris.load_cube( + other_filename, "air_potential_temperature" + ) + self.assertEqual(cube, other_cube) + + +@tests.skip_data +class TestSaveMultipleAuxFactories(tests.IrisTest): + def test_hybrid_height_and_pressure(self): + cube = stock.realistic_4d() + cube.add_aux_coord( + iris.coords.DimCoord( + 1200.0, long_name="level_pressure", units="hPa" + ) + ) + cube.add_aux_coord( + iris.coords.DimCoord(0.5, long_name="other sigma", units="1") + ) + cube.add_aux_coord( + iris.coords.DimCoord( + 1000.0, long_name="surface_air_pressure", units="hPa" + ) + ) + factory = iris.aux_factory.HybridPressureFactory( + cube.coord("level_pressure"), + cube.coord("other sigma"), + cube.coord("surface_air_pressure"), + ) + cube.add_aux_factory(factory) + with self.temp_filename(suffix=".nc") as filename: + iris.save(cube, filename) + self.assertCDL(filename) + + def test_shared_primary(self): + cube = stock.realistic_4d() + factory = iris.aux_factory.HybridHeightFactory( + cube.coord("level_height"), + cube.coord("sigma"), + cube.coord("surface_altitude"), + ) + factory.rename("another altitude") + cube.add_aux_factory(factory) + with self.temp_filename( + suffix=".nc" + ) as filename, self.assertRaisesRegex( + ValueError, "multiple aux factories" + ): + iris.save(cube, filename) + + def test_hybrid_height_cubes(self): + hh1 = stock.simple_4d_with_hybrid_height() + hh1.attributes["cube"] = "hh1" + hh2 = stock.simple_4d_with_hybrid_height() + hh2.attributes["cube"] = "hh2" + sa = hh2.coord("surface_altitude") + sa.points = sa.points * 10 + with self.temp_filename(".nc") as fname: + iris.save([hh1, hh2], fname) + cubes = iris.load(fname, "air_temperature") + cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"]) + self.assertCML(cubes) + + def test_hybrid_height_cubes_on_dimension_coordinate(self): + hh1 = stock.hybrid_height() + hh2 = stock.hybrid_height() + sa = hh2.coord("surface_altitude") + sa.points = sa.points * 10 + emsg = "Unable to create dimensonless vertical coordinate." + with self.temp_filename(".nc") as fname, self.assertRaisesRegex( + ValueError, emsg + ): + iris.save([hh1, hh2], fname) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py new file mode 100644 index 0000000000..3175664b4c --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -0,0 +1,281 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Integration tests for coord-system-related loading and saving netcdf files.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip + +from os.path import join as path_join +import shutil +import tempfile + +import iris +from iris.coords import DimCoord +from iris.cube import Cube +from iris.tests import stock as stock +from iris.tests.stock.netcdf import ncgen_from_cdl +from iris.tests.unit.fileformats.netcdf.loader import test_load_cubes as tlc + + +@tests.skip_data +class TestCoordSystem(tests.IrisTest): + def setUp(self): + tlc.setUpModule() + + def tearDown(self): + tlc.tearDownModule() + + def test_load_laea_grid(self): + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "lambert_azimuthal_equal_area", "euro_air_temp.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_laea.cml")) + + datum_cf_var_cdl = """ + netcdf output { + dimensions: + y = 4 ; + x = 3 ; + variables: + float data(y, x) ; + data :standard_name = "toa_brightness_temperature" ; + data :units = "K" ; + data :grid_mapping = "mercator" ; + int mercator ; + mercator:grid_mapping_name = "mercator" ; + mercator:longitude_of_prime_meridian = 0. ; + mercator:earth_radius = 6378169. ; + mercator:horizontal_datum_name = "OSGB36" ; + float y(y) ; + y:axis = "Y" ; + y:units = "m" ; + y:standard_name = "projection_y_coordinate" ; + float x(x) ; + x:axis = "X" ; + x:units = "m" ; + x:standard_name = "projection_x_coordinate" ; + + // global attributes: + :Conventions = "CF-1.7" ; + :standard_name_vocabulary = "CF Standard Name Table v27" ; + + data: + + data = + 0, 1, 2, + 3, 4, 5, + 6, 7, 8, + 9, 10, 11 ; + + mercator = _ ; + + y = 1, 2, 3, 5 ; + + x = -6, -4, -2 ; + + } + """ + + datum_wkt_cdl = """ +netcdf output5 { +dimensions: + y = 4 ; + x = 3 ; +variables: + float data(y, x) ; + data :standard_name = "toa_brightness_temperature" ; + data :units = "K" ; + data :grid_mapping = "mercator" ; + int mercator ; + mercator:grid_mapping_name = "mercator" ; + mercator:longitude_of_prime_meridian = 0. ; + mercator:earth_radius = 6378169. ; + mercator:longitude_of_projection_origin = 0. ; + mercator:false_easting = 0. ; + mercator:false_northing = 0. ; + mercator:scale_factor_at_projection_origin = 1. 
; + mercator:crs_wkt = "PROJCRS[\\"unknown\\",BASEGEOGCRS[\\"unknown\\",DATUM[\\"OSGB36\\",ELLIPSOID[\\"unknown\\",6378169,0,LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]],PRIMEM[\\"Greenwich\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8901]]],CONVERSION[\\"unknown\\",METHOD[\\"Mercator (variant B)\\",ID[\\"EPSG\\",9805]],PARAMETER[\\"Latitude of 1st standard parallel\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8823]],PARAMETER[\\"Longitude of natural origin\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8802]],PARAMETER[\\"False easting\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8806]],PARAMETER[\\"False northing\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8807]]],CS[Cartesian,2],AXIS[\\"(E)\\",east,ORDER[1],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]],AXIS[\\"(N)\\",north,ORDER[2],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]]" ; + float y(y) ; + y:axis = "Y" ; + y:units = "m" ; + y:standard_name = "projection_y_coordinate" ; + float x(x) ; + x:axis = "X" ; + x:units = "m" ; + x:standard_name = "projection_x_coordinate" ; + +// global attributes: + :standard_name_vocabulary = "CF Standard Name Table v27" ; + :Conventions = "CF-1.7" ; +data: + + data = + 0, 1, 2, + 3, 4, 5, + 6, 7, 8, + 9, 10, 11 ; + + mercator = _ ; + + y = 1, 2, 3, 5 ; + + x = -6, -4, -2 ; +} + """ + + def test_load_datum_wkt(self): + expected = "OSGB 1936" + nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl) + with iris.FUTURE.context(datum_support=True): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(expected, actual) + + def test_no_load_datum_wkt(self): + nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl) + with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(actual, "unknown") + + def test_load_datum_cf_var(self): + expected = "OSGB 1936" + nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl) + with iris.FUTURE.context(datum_support=True): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(expected, actual) + + def test_no_load_datum_cf_var(self): + nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl) + with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"): + cube = iris.load_cube(nc_path) + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(actual, "unknown") + + def test_save_datum(self): + expected = "OSGB 1936" + saved_crs = iris.coord_systems.Mercator( + ellipsoid=iris.coord_systems.GeogCS.from_datum("OSGB36") + ) + + base_cube = stock.realistic_3d() + base_lat_coord = base_cube.coord("grid_latitude") + test_lat_coord = DimCoord( + base_lat_coord.points, + standard_name="projection_y_coordinate", + coord_system=saved_crs, + ) + base_lon_coord = base_cube.coord("grid_longitude") + test_lon_coord = DimCoord( + base_lon_coord.points, + standard_name="projection_x_coordinate", + coord_system=saved_crs, + ) + test_cube = Cube( + base_cube.data, + standard_name=base_cube.standard_name, + units=base_cube.units, + dim_coords_and_dims=( + (base_cube.coord("time"), 0), + (test_lat_coord, 1), + (test_lon_coord, 2), + ), + ) + + with 
self.temp_filename(suffix=".nc") as filename: + iris.save(test_cube, filename) + with iris.FUTURE.context(datum_support=True): + cube = iris.load_cube(filename) + + test_crs = cube.coord("projection_y_coordinate").coord_system + actual = str(test_crs.as_cartopy_crs().datum) + self.assertMultiLineEqual(expected, actual) + + +class TestLoadMinimalGeostationary(tests.IrisTest): + """ + Check we can load data with a geostationary grid-mapping, even when the + 'false_easting' and 'false_northing' properties are missing. + + """ + + _geostationary_problem_cdl = """ +netcdf geostationary_problem_case { +dimensions: + y = 2 ; + x = 3 ; +variables: + short radiance(y, x) ; + radiance:standard_name = "toa_outgoing_radiance_per_unit_wavelength" ; + radiance:units = "W m-2 sr-1 um-1" ; + radiance:coordinates = "y x" ; + radiance:grid_mapping = "imager_grid_mapping" ; + short y(y) ; + y:units = "rad" ; + y:axis = "Y" ; + y:long_name = "fixed grid projection y-coordinate" ; + y:standard_name = "projection_y_coordinate" ; + short x(x) ; + x:units = "rad" ; + x:axis = "X" ; + x:long_name = "fixed grid projection x-coordinate" ; + x:standard_name = "projection_x_coordinate" ; + int imager_grid_mapping ; + imager_grid_mapping:grid_mapping_name = "geostationary" ; + imager_grid_mapping:perspective_point_height = 35786023. ; + imager_grid_mapping:semi_major_axis = 6378137. ; + imager_grid_mapping:semi_minor_axis = 6356752.31414 ; + imager_grid_mapping:latitude_of_projection_origin = 0. ; + imager_grid_mapping:longitude_of_projection_origin = -75. ; + imager_grid_mapping:sweep_angle_axis = "x" ; + +data: + + // coord values, just so these can be dim-coords + y = 0, 1 ; + x = 0, 1, 2 ; + +} +""" + + @classmethod + def setUpClass(cls): + # Create a temp directory for transient test files. + cls.temp_dir = tempfile.mkdtemp() + cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl") + cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc") + # Create reference CDL and netcdf files from the CDL text. + ncgen_from_cdl( + cdl_str=cls._geostationary_problem_cdl, + cdl_path=cls.path_test_cdl, + nc_path=cls.path_test_nc, + ) + + @classmethod + def tearDownClass(cls): + # Destroy the temp directory. + shutil.rmtree(cls.temp_dir) + + def test_geostationary_no_false_offsets(self): + # Check we can load the test data and coordinate system properties are correct. + cube = iris.load_cube(self.path_test_nc) + # Check the coordinate system has the correct default properties. + cs = cube.coord_system() + self.assertIsInstance(cs, iris.coord_systems.Geostationary) + self.assertEqual(cs.false_easting, 0.0) + self.assertEqual(cs.false_northing, 0.0) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py new file mode 100644 index 0000000000..616feb3b0e --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -0,0 +1,339 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Integration tests for delayed saving.
+""" +import warnings + +from cf_units import Unit +import dask.array as da +import dask.config +from dask.delayed import Delayed +import distributed +import numpy as np +import pytest + +import iris +from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import SaverFillValueWarning +import iris.tests +from iris.tests.stock import realistic_4d + + +class Test__lazy_stream_data: + @pytest.fixture(autouse=True) + def output_path(self, tmp_path): + # A temporary output netcdf-file path, **unique to each test call**. + self.temp_output_filepath = tmp_path / "tmp.nc" + yield self.temp_output_filepath + + @pytest.fixture(autouse=True, scope="module") + def all_vars_lazy(self): + # For the operation of these tests, we want to force all netcdf variables + # to load as lazy data, i.e. **don't** use real data for 'small' ones. + old_value = iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + yield + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = old_value + + @staticmethod + @pytest.fixture(params=[False, True], ids=["SaveImmediate", "SaveDelayed"]) + def save_is_delayed(request): + return request.param + + @staticmethod + def make_testcube( + include_lazy_content=True, + ensure_fillvalue_collision=False, + data_is_maskedbytes=False, + include_extra_coordlikes=False, + ): + cube = realistic_4d() + + def fix_array(array): + """ + Make a new, custom array to replace the provided cube/coord data. + Optionally provide default-fill-value collisions, and/or replace with lazy + content. + """ + if array is not None: + if data_is_maskedbytes: + dmin, dmax = 0, 255 + else: + dmin, dmax = array.min(), array.max() + array = np.random.uniform(dmin, dmax, size=array.shape) + + if data_is_maskedbytes: + array = array.astype("u1") + array = np.ma.masked_array(array) + # To trigger, it must also have at least one *masked point*. + array[tuple([0] * array.ndim)] = np.ma.masked + + if ensure_fillvalue_collision: + # Set point at midpoint index = default-fill-value + fill_value = default_fillvals[array.dtype.str[1:]] + inds = tuple(dim // 2 for dim in array.shape) + array[inds] = fill_value + + if include_lazy_content: + # Make the array lazy. + # Ensure we always have multiple chunks (relatively small ones). + chunks = list(array.shape) + chunks[0] = 1 + array = da.from_array(array, chunks=chunks) + + return array + + # Replace the cube data, and one aux-coord, according to the control settings. + cube.data = fix_array(cube.data) + auxcoord = cube.coord("surface_altitude") + auxcoord.points = fix_array(auxcoord.points) + + if include_extra_coordlikes: + # Also concoct + attach an ancillary variable and a cell-measure, so we can + # check that they behave the same as coordinates. 
+ ancil_dims = [0, 2] + cm_dims = [0, 3] + ancil_shape = [cube.shape[idim] for idim in ancil_dims] + cm_shape = [cube.shape[idim] for idim in cm_dims] + from iris.coords import AncillaryVariable, CellMeasure + + ancil = AncillaryVariable( + fix_array(np.zeros(ancil_shape)), long_name="sample_ancil" + ) + cube.add_ancillary_variable(ancil, ancil_dims) + cm = CellMeasure( + fix_array(np.zeros(cm_shape)), long_name="sample_cm" + ) + cube.add_cell_measure(cm, cm_dims) + return cube + + def test_realfile_loadsave_equivalence(self, save_is_delayed, output_path): + input_filepath = iris.tests.get_data_path( + ["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"] + ) + original_cubes = iris.load(input_filepath) + + # Preempt some standard changes that an iris save will impose. + for cube in original_cubes: + if cube.units == Unit("-"): + # replace 'unknown unit' with 'no unit'. + cube.units = Unit("?") + # Fix conventions attribute to what iris.save outputs. + cube.attributes["Conventions"] = "CF-1.7" + + original_cubes = sorted(original_cubes, key=lambda cube: cube.name()) + result = iris.save( + original_cubes, output_path, compute=not save_is_delayed + ) + if save_is_delayed: + # In this case, must also "complete" the save. + result.compute() + reloaded_cubes = iris.load(output_path) + reloaded_cubes = sorted(reloaded_cubes, key=lambda cube: cube.name()) + assert reloaded_cubes == original_cubes + # NOTE: it might be nicer to use assertCDL, but unfortunately importing + # unittest.TestCase seems to lose us the ability to use fixtures. + + @classmethod + @pytest.fixture( + params=[ + "ThreadedScheduler", + "DistributedScheduler", + "SingleThreadScheduler", + ] + ) + def scheduler_type(cls, request): + sched_typename = request.param + if sched_typename == "ThreadedScheduler": + config_name = "threads" + elif sched_typename == "SingleThreadScheduler": + config_name = "single-threaded" + else: + assert sched_typename == "DistributedScheduler" + config_name = "distributed" + + if config_name == "distributed": + _distributed_client = distributed.Client() + + with dask.config.set(scheduler=config_name): + yield sched_typename + + if config_name == "distributed": + _distributed_client.close() + + def test_scheduler_types( + self, output_path, scheduler_type, save_is_delayed + ): + # Check operation works and behaves the same with different schedulers, + # especially including distributed. + + # Just check that the dask scheduler is setup as 'expected'. + if scheduler_type == "ThreadedScheduler": + expected_dask_scheduler = "threads" + elif scheduler_type == "SingleThreadScheduler": + expected_dask_scheduler = "single-threaded" + else: + assert scheduler_type == "DistributedScheduler" + expected_dask_scheduler = "distributed" + + assert dask.config.get("scheduler") == expected_dask_scheduler + + # Use a testcase that produces delayed warnings (and check those too). 
+ cube = self.make_testcube( + include_lazy_content=True, ensure_fillvalue_collision=True + ) + with warnings.catch_warnings(record=True) as logged_warnings: + result = iris.save(cube, output_path, compute=not save_is_delayed) + + if not save_is_delayed: + assert result is None + assert len(logged_warnings) == 2 + issued_warnings = [log.message for log in logged_warnings] + else: + assert result is not None + assert len(logged_warnings) == 0 + warnings.simplefilter("error") + issued_warnings = result.compute() + + assert len(issued_warnings) == 2 + expected_msg = "contains unmasked data points equal to the fill-value" + assert all( + expected_msg in warning.args[0] for warning in issued_warnings + ) + + def test_time_of_writing( + self, save_is_delayed, output_path, scheduler_type + ): + # Check when lazy data is *actually* written : + # - in 'immediate' mode, on initial file write + # - in 'delayed' mode, only when the delayed-write is computed. + original_cube = self.make_testcube(include_extra_coordlikes=True) + assert original_cube.has_lazy_data() + assert original_cube.coord("surface_altitude").has_lazy_points() + assert original_cube.cell_measure("sample_cm").has_lazy_data() + assert original_cube.ancillary_variable("sample_ancil").has_lazy_data() + + result = iris.save( + original_cube, + output_path, + compute=not save_is_delayed, + ) + assert save_is_delayed == (result is not None) + + # Read back : NOTE avoid loading the separate surface-altitude cube. + readback_cube = iris.load_cube( + output_path, "air_potential_temperature" + ) + # Check the components to be tested *are* lazy. See: self.all_vars_lazy(). + assert readback_cube.has_lazy_data() + assert readback_cube.coord("surface_altitude").has_lazy_points() + assert readback_cube.cell_measure("sample_cm").has_lazy_data() + assert readback_cube.ancillary_variable("sample_ancil").has_lazy_data() + + # If 'delayed', the lazy content should all be masked, otherwise none of it. + def getmask(cube_or_coord): + cube_or_coord = ( + cube_or_coord.copy() + ) # avoid realising the original + if hasattr(cube_or_coord, "points"): + data = cube_or_coord.points + else: + data = cube_or_coord.data + return np.ma.getmaskarray(data) + + test_components = [ + readback_cube, + readback_cube.coord("surface_altitude"), + readback_cube.ancillary_variable("sample_ancil"), + readback_cube.cell_measure("sample_cm"), + ] + + def fetch_masks(): + data_mask, coord_mask, ancil_mask, cm_mask = [ + getmask(data) for data in test_components + ] + return data_mask, coord_mask, ancil_mask, cm_mask + + data_mask, coord_mask, ancil_mask, cm_mask = fetch_masks() + if save_is_delayed: + assert np.all(data_mask) + assert np.all(coord_mask) + assert np.all(ancil_mask) + assert np.all(cm_mask) + else: + assert np.all(~data_mask) + assert np.all(~coord_mask) + assert np.all(~ancil_mask) + assert np.all(~cm_mask) + + if save_is_delayed: + # Complete the write. + result.compute() + + # Re-fetch the lazy arrays. The data should now **not be masked**. + data_mask, coord_mask, ancil_mask, cm_mask = fetch_masks() + # All written now ? + assert np.all(~data_mask) + assert np.all(~coord_mask) + assert np.all(~ancil_mask) + assert np.all(~cm_mask) + + @pytest.mark.parametrize( + "warning_type", ["WarnMaskedBytes", "WarnFillvalueCollision"] + ) + def test_fill_warnings(self, warning_type, output_path, save_is_delayed): + # Test collision warnings for data with fill-value collisions, or for masked + # byte data. 
+ if warning_type == "WarnFillvalueCollision": + make_fv_collide = True + make_maskedbytes = False + expected_msg = ( + "contains unmasked data points equal to the fill-value" + ) + else: + assert warning_type == "WarnMaskedBytes" + make_fv_collide = False + make_maskedbytes = True + expected_msg = "contains byte data with masked points" + + cube = self.make_testcube( + include_lazy_content=True, + ensure_fillvalue_collision=make_fv_collide, + data_is_maskedbytes=make_maskedbytes, + ) + with warnings.catch_warnings(record=True) as logged_warnings: + result = iris.save(cube, output_path, compute=not save_is_delayed) + + result_warnings = [ + log.message + for log in logged_warnings + if isinstance(log.message, SaverFillValueWarning) + ] + + if save_is_delayed: + # Should have had *no* fill-warnings in the initial save. + assert len(result_warnings) == 0 + # Complete the operation now + with warnings.catch_warnings(): + # NOTE: warnings should *not* be issued here, instead they are returned. + warnings.simplefilter("error", category=SaverFillValueWarning) + result_warnings = result.compute() + + # Either way, we should now have 2 similar warnings. + assert len(result_warnings) == 2 + assert all( + expected_msg in warning.args[0] for warning in result_warnings + ) + + def test_no_delayed_writes(self, output_path): + # Just check that a delayed save returns a usable 'delayed' object, even when + # there is no lazy content = no delayed writes to perform. + cube = self.make_testcube(include_lazy_content=False) + warnings.simplefilter("error") + result = iris.save(cube, output_path, compute=False) + assert isinstance(result, Delayed) + assert result.compute() == [] diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py new file mode 100644 index 0000000000..dc0c29455f --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -0,0 +1,495 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Integration tests for loading and saving netcdf files.""" +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +from itertools import repeat +import os.path +from pathlib import Path +import shutil +import tempfile +from unittest import mock +import warnings + +import numpy as np +import numpy.ma as ma +import pytest + +import iris +import iris.coord_systems +from iris.coords import CellMethod +from iris.cube import Cube, CubeList +import iris.exceptions +from iris.fileformats.netcdf import Saver, UnknownCellMethodWarning + +# Get the netCDF4 module, but in a sneaky way that avoids triggering the "do not import +# netCDF4" check in "iris.tests.test_coding_standards.test_netcdf4_import()". +import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc + +nc = threadsafe_nc.netCDF4 + +from iris.tests.stock.netcdf import ncgen_from_cdl + + +class TestLazySave(tests.IrisTest): + @tests.skip_data + def test_lazy_preserved_save(self): + fpath = tests.get_data_path( + ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc") + ) + # While loading, "turn off" loading small variables as real data. 
+ with mock.patch( + "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 + ): + acube = iris.load_cube(fpath, "air_temperature") + self.assertTrue(acube.has_lazy_data()) + # Also check a coord with lazy points + bounds. + self.assertTrue(acube.coord("forecast_period").has_lazy_points()) + self.assertTrue(acube.coord("forecast_period").has_lazy_bounds()) + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(acube) + # Check that cube data is not realised, also coord points + bounds. + self.assertTrue(acube.has_lazy_data()) + self.assertTrue(acube.coord("forecast_period").has_lazy_points()) + self.assertTrue(acube.coord("forecast_period").has_lazy_bounds()) + + +@tests.skip_data +class TestCellMeasures(tests.IrisTest): + def setUp(self): + self.fname = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) + + def test_load_raw(self): + (cube,) = iris.load_raw(self.fname) + self.assertEqual(len(cube.cell_measures()), 1) + self.assertEqual(cube.cell_measures()[0].measure, "area") + + def test_load(self): + cube = iris.load_cube(self.fname) + self.assertEqual(len(cube.cell_measures()), 1) + self.assertEqual(cube.cell_measures()[0].measure, "area") + + def test_merge_cell_measure_aware(self): + (cube1,) = iris.load_raw(self.fname) + (cube2,) = iris.load_raw(self.fname) + cube2._cell_measures_and_dims[0][0].var_name = "not_areat" + cubes = CubeList([cube1, cube2]).merge() + self.assertEqual(len(cubes), 2) + + def test_concatenate_cell_measure_aware(self): + (cube1,) = iris.load_raw(self.fname) + cube1 = cube1[:, :, 0, 0] + cm_and_dims = cube1._cell_measures_and_dims + (cube2,) = iris.load_raw(self.fname) + cube2 = cube2[:, :, 0, 0] + cube2._cell_measures_and_dims[0][0].var_name = "not_areat" + cube2.coord("time").points = cube2.coord("time").points + 1 + cubes = CubeList([cube1, cube2]).concatenate() + self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims) + self.assertEqual(len(cubes), 2) + + def test_concatenate_cell_measure_match(self): + (cube1,) = iris.load_raw(self.fname) + cube1 = cube1[:, :, 0, 0] + cm_and_dims = cube1._cell_measures_and_dims + (cube2,) = iris.load_raw(self.fname) + cube2 = cube2[:, :, 0, 0] + cube2.coord("time").points = cube2.coord("time").points + 1 + cubes = CubeList([cube1, cube2]).concatenate() + self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims) + self.assertEqual(len(cubes), 1) + + def test_round_trip(self): + (cube,) = iris.load(self.fname) + with self.temp_filename(suffix=".nc") as filename: + iris.save(cube, filename, unlimited_dimensions=[]) + (round_cube,) = iris.load_raw(filename) + self.assertEqual(len(round_cube.cell_measures()), 1) + self.assertEqual(round_cube.cell_measures()[0].measure, "area") + + def test_print(self): + cube = iris.load_cube(self.fname) + printed = cube.__str__() + self.assertIn( + ( + "Cell measures:\n" + " cell_area - - " + " x x" + ), + printed, + ) + + +class TestCellMethod_unknown(tests.IrisTest): + def test_unknown_method(self): + cube = Cube([1, 2], long_name="odd_phenomenon") + cube.add_cell_method(CellMethod(method="oddity", coords=("x",))) + temp_dirpath = tempfile.mkdtemp() + try: + temp_filepath = os.path.join(temp_dirpath, "tmp.nc") + iris.save(cube, temp_filepath) + with warnings.catch_warnings(record=True) as warning_records: + iris.load(temp_filepath) + # Filter to get the warning we are interested in. 
+ warning_messages = [record.message for record in warning_records] + warning_messages = [ + warn + for warn in warning_messages + if isinstance(warn, UnknownCellMethodWarning) + ] + self.assertEqual(len(warning_messages), 1) + message = warning_messages[0].args[0] + msg = ( + "NetCDF variable 'odd_phenomenon' contains unknown cell " + "method 'oddity'" + ) + self.assertIn(msg, message) + finally: + shutil.rmtree(temp_dirpath) + + +def _get_scale_factor_add_offset(cube, datatype): + """Utility function used by netCDF data packing tests.""" + if isinstance(datatype, dict): + dt = np.dtype(datatype["dtype"]) + else: + dt = np.dtype(datatype) + cmax = cube.data.max() + cmin = cube.data.min() + n = dt.itemsize * 8 + if ma.isMaskedArray(cube.data): + masked = True + else: + masked = False + if masked: + scale_factor = (cmax - cmin) / (2**n - 2) + else: + scale_factor = (cmax - cmin) / (2**n - 1) + if dt.kind == "u": + add_offset = cmin + elif dt.kind == "i": + if masked: + add_offset = (cmax + cmin) / 2 + else: + add_offset = cmin + 2 ** (n - 1) * scale_factor + return (scale_factor, add_offset) + + +@tests.skip_data +class TestPackedData(tests.IrisTest): + def _single_test(self, datatype, CDLfilename, manual=False): + # Read PP input file. + file_in = tests.get_data_path( + ( + "PP", + "cf_processing", + "000003000000.03.236.000128.1990.12.01.00.00.b.pp", + ) + ) + cube = iris.load_cube(file_in) + scale_factor, offset = _get_scale_factor_add_offset(cube, datatype) + if manual: + packspec = dict( + dtype=datatype, scale_factor=scale_factor, add_offset=offset + ) + else: + packspec = datatype + # Write Cube to netCDF file. + with self.temp_filename(suffix=".nc") as file_out: + iris.save(cube, file_out, packing=packspec) + decimal = int(-np.log10(scale_factor)) + packedcube = iris.load_cube(file_out) + # Check that packed cube is accurate to expected precision + self.assertArrayAlmostEqual( + cube.data, packedcube.data, decimal=decimal + ) + # Check the netCDF file against CDL expected output. + self.assertCDL( + file_out, + ( + "integration", + "netcdf", + "general", + "TestPackedData", + CDLfilename, + ), + ) + + def test_single_packed_signed(self): + """Test saving a single CF-netCDF file with packing.""" + self._single_test("i2", "single_packed_signed.cdl") + + def test_single_packed_unsigned(self): + """Test saving a single CF-netCDF file with packing into unsigned.""" + self._single_test("u1", "single_packed_unsigned.cdl") + + def test_single_packed_manual_scale(self): + """Test saving a single CF-netCDF file with packing with scale + factor and add_offset set manually.""" + self._single_test("i2", "single_packed_manual.cdl", manual=True) + + def _multi_test(self, CDLfilename, multi_dtype=False): + """Test saving multiple packed cubes with pack_dtype list.""" + # Read PP input file. + file_in = tests.get_data_path( + ("PP", "cf_processing", "abcza_pa19591997_daily_29.b.pp") + ) + cubes = iris.load(file_in) + # ensure cube order is the same: + cubes.sort(key=lambda cube: cube.cell_methods[0].method) + datatype = "i2" + scale_factor, offset = _get_scale_factor_add_offset(cubes[0], datatype) + if multi_dtype: + packdict = dict( + dtype=datatype, scale_factor=scale_factor, add_offset=offset + ) + packspec = [packdict, None, "u2"] + dtypes = packspec + else: + packspec = datatype + dtypes = repeat(packspec) + + # Write Cube to netCDF file. + with self.temp_filename(suffix=".nc") as file_out: + iris.save(cubes, file_out, packing=packspec) + # Check the netCDF file against CDL expected output. 
+ self.assertCDL( + file_out, + ( + "integration", + "netcdf", + "general", + "TestPackedData", + CDLfilename, + ), + ) + packedcubes = iris.load(file_out) + packedcubes.sort(key=lambda cube: cube.cell_methods[0].method) + for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes): + if dtype: + sf, ao = _get_scale_factor_add_offset(cube, dtype) + decimal = int(-np.log10(sf)) + # Check that packed cube is accurate to expected precision + self.assertArrayAlmostEqual( + cube.data, packedcube.data, decimal=decimal + ) + else: + self.assertArrayEqual(cube.data, packedcube.data) + + def test_multi_packed_single_dtype(self): + """Test saving multiple packed cubes with the same pack_dtype.""" + # Read PP input file. + self._multi_test("multi_packed_single_dtype.cdl") + + def test_multi_packed_multi_dtype(self): + """Test saving multiple packed cubes with pack_dtype list.""" + # Read PP input file. + self._multi_test("multi_packed_multi_dtype.cdl", multi_dtype=True) + + +class TestScalarCube(tests.IrisTest): + def test_scalar_cube_save_load(self): + cube = iris.cube.Cube(1, long_name="scalar_cube") + with self.temp_filename(suffix=".nc") as fout: + iris.save(cube, fout) + scalar_cube = iris.load_cube(fout) + self.assertEqual(scalar_cube.name(), "scalar_cube") + + +@tests.skip_data +class TestConstrainedLoad(tests.IrisTest): + filename = tests.get_data_path( + ("NetCDF", "label_and_climate", "A1B-99999a-river-sep-2070-2099.nc") + ) + + def test_netcdf_with_NameConstraint(self): + constr = iris.NameConstraint(var_name="cdf_temp_dmax_tmean_abs") + cubes = iris.load(self.filename, constr) + self.assertEqual(len(cubes), 1) + self.assertEqual(cubes[0].var_name, "cdf_temp_dmax_tmean_abs") + + def test_netcdf_with_no_constraint(self): + cubes = iris.load(self.filename) + self.assertEqual(len(cubes), 3) + + +class TestSkippedCoord: + # If a coord/cell measure/etcetera cannot be added to the loaded Cube, a + # Warning is raised and the coord is skipped. + # This 'catching' is generic to all CannotAddErrors, but currently the only + # such problem that can exist in a NetCDF file is a mismatch of dimensions + # between phenomenon and coord. + + cdl_core = """ +dimensions: + length_scale = 1 ; + lat = 3 ; +variables: + float lat(lat) ; + lat:standard_name = "latitude" ; + lat:units = "degrees_north" ; + short lst_unc_sys(length_scale) ; + lst_unc_sys:long_name = "uncertainty from large-scale systematic + errors" ; + lst_unc_sys:units = "kelvin" ; + lst_unc_sys:coordinates = "lat" ; + +data: + lat = 0, 1, 2; + """ + + @pytest.fixture(autouse=True) + def create_nc_file(self, tmp_path): + file_name = "dim_mismatch" + cdl = f"netcdf {file_name}" + "{\n" + self.cdl_core + "\n}" + self.nc_path = (tmp_path / file_name).with_suffix(".nc") + ncgen_from_cdl( + cdl_str=cdl, + cdl_path=None, + nc_path=str(self.nc_path), + ) + yield + self.nc_path.unlink() + + def test_lat_not_loaded(self): + # iris#5068 includes discussion of possible retention of the skipped + # coords in the future. 
+ with pytest.warns( + match="Missing data dimensions for multi-valued DimCoord" + ): + cube = iris.load_cube(self.nc_path) + with pytest.raises(iris.exceptions.CoordinateNotFoundError): + _ = cube.coord("lat") + + +@tests.skip_data +class TestDatasetAndPathLoads(tests.IrisTest): + @classmethod + def setUpClass(cls): + cls.filepath = tests.get_data_path( + ["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"] + ) + cls.phenom_id = "Carbon Dioxide" + cls.expected = iris.load_cube(cls.filepath, cls.phenom_id) + + def test_basic_load(self): + # test loading from an open Dataset, in place of a filepath spec. + ds = nc.Dataset(self.filepath) + result = iris.load_cube(ds, self.phenom_id) + # It should still be open (!) + self.assertTrue(ds.isopen()) + ds.close() + + # Check that result is just the same as a 'direct' load. + self.assertEqual(self.expected, result) + + def test_path_string_load_same(self): + # Check that loading from a Path is the same as passing a filepath string. + # Apart from general utility, checks that we won't mistake a Path for a Dataset. + path = Path(self.filepath) + result = iris.load_cube(path, self.phenom_id) + self.assertEqual(result, self.expected) + + +@tests.skip_data +class TestDatasetAndPathSaves(tests.IrisTest): + @classmethod + def setUpClass(cls): + # Create a temp directory for transient test files. + cls.temp_dir = tempfile.mkdtemp() + cls.testpath = tests.get_data_path( + ["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"] + ) + # Load some test data for save testing. + testdata = iris.load(cls.testpath) + # Sort to ensure non-random cube order. + testdata = sorted(testdata, key=lambda cube: cube.name()) + cls.testdata = testdata + + @classmethod + def tearDownClass(cls): + # Destroy the temp directory. + shutil.rmtree(cls.temp_dir) + + def test_basic_save(self): + # test saving to a Dataset, in place of a filepath spec. + # NOTE that this requires 'compute=False', as delayed saves can only operate on + # a closed file. + + # Save to netcdf file in the usual way. + filepath_direct = f"{self.temp_dir}/tmp_direct.nc" + iris.save(self.testdata, filepath_direct) + # Check against test-specific CDL result file. + self.assertCDL(filepath_direct) + + # Save same data indirectly via a netcdf dataset. + filepath_indirect = f"{self.temp_dir}/tmp_indirect.nc" + nc_dataset = nc.Dataset(filepath_indirect, "w") + # NOTE: we **must** use delayed saving here, as we cannot do direct saving to + # a user-owned dataset. + result = iris.save( + self.testdata, nc_dataset, saver="nc", compute=False + ) + + # Do some very basic sanity checks on the resulting Dataset. + # It should still be open (!) + self.assertTrue(nc_dataset.isopen()) + self.assertEqual( + ["time", "levelist", "latitude", "longitude"], + list(nc_dataset.dimensions), + ) + self.assertEqual( + ["co2", "time", "levelist", "latitude", "longitude", "lnsp"], + list(nc_dataset.variables), + ) + nc_dataset.close() + + # Check the saved file against the same CDL as the 'normal' save. + self.assertCDL(filepath_indirect) + + # Confirm that cube content is however not yet written. + ds = nc.Dataset(filepath_indirect) + for cube in self.testdata: + assert np.all(ds.variables[cube.var_name][:].mask) + ds.close() + + # Complete the delayed saves. + result.compute() + + # Check that data now *is* written. 
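# A minimal sketch (not part of the test above) of the delayed-save workflow
# being checked here, with hypothetical `cubes` and `path` names.  Saving into
# a user-owned Dataset requires compute=False; the returned object finishes
# writing the variable data when computed.
import netCDF4 as nc

import iris


def sketch_delayed_save(cubes, path):
    dataset = nc.Dataset(path, "w")
    delayed = iris.save(cubes, dataset, saver="nc", compute=False)
    dataset.close()  # dimensions/variables exist, but their data is unwritten
    delayed.compute()  # the lazy cube data is now streamed into the file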
+ ds = nc.Dataset(filepath_indirect) + for cube in self.testdata: + assert np.all(ds.variables[cube.var_name][:] == cube.data) + ds.close() + + def test_computed_delayed_save__fail(self): + # Call as above 'test_basic_save' but with "compute=True" : this should raise + # an error. + filepath_indirect = f"{self.temp_dir}/tmp_indirect_complete.nc" + nc_dataset = nc.Dataset(filepath_indirect, "w") + + # NOTE: a "normal" compute=True call should raise an error. + msg = "Cannot save to a user-provided dataset with 'compute=True'" + with pytest.raises(ValueError, match=msg): + iris.save(self.testdata, nc_dataset, saver="nc") + + def test_path_string_save_same(self): + # Ensure that save to a Path is the same as passing a filepath string. + # Apart from general utility, checks that we won't mistake a Path for a Dataset. + tempfile_fromstr = f"{self.temp_dir}/tmp_fromstr.nc" + iris.save(self.testdata, tempfile_fromstr) + tempfile_frompath = f"{self.temp_dir}/tmp_frompath.nc" + path = Path(tempfile_frompath) + iris.save(self.testdata, path) + self.assertCDL(tempfile_fromstr) + self.assertCDL(tempfile_frompath) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py new file mode 100644 index 0000000000..3395296e11 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py @@ -0,0 +1,126 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Integration tests for iris#3367 - loading a self-referencing NetCDF file.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +import os +import tempfile +from unittest import mock + +import numpy as np + +import iris +from iris.fileformats.netcdf import _thread_safe_nc + + +@tests.skip_data +class TestCMIP6VolcelloLoad(tests.IrisTest): + def setUp(self): + self.fname = tests.get_data_path( + ( + "NetCDF", + "volcello", + "volcello_Ofx_CESM2_deforest-globe_r1i1p1f1_gn.nc", + ) + ) + + def test_cmip6_volcello_load_issue_3367(self): + # Ensure that reading a file which references itself in + # `cell_measures` can be read. At the same time, ensure that we + # still receive a warning about other variables mentioned in + # `cell_measures` i.e. a warning should be raised about missing + # areacello. 
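# Sketch (not from this PR) of the situation exercised by this test: the data
# variable's cell_measures attribute names two measure variables, e.g.
#     volcello:cell_measures = "area: areacello volume: volcello" ;
# The "volume" entry points back at the data variable itself, which should be
# tolerated, while the absent "areacello" variable should only trigger a
# warning rather than a load failure.  `path` is a hypothetical file path.
import warnings

import iris


def sketch_load_with_missing_measure(path):
    with warnings.catch_warnings(record=True) as records:
        warnings.simplefilter("always")
        cube = iris.load_cube(path)
    assert any(
        "Missing CF-netCDF measure variable" in str(record.message)
        for record in records
    )
    return cube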
+ areacello_str = "areacello" + volcello_str = "volcello" + expected_msg = ( + "Missing CF-netCDF measure variable %r, " + "referenced by netCDF variable %r" % (areacello_str, volcello_str) + ) + + with mock.patch("warnings.warn") as warn: + # ensure file loads without failure + cube = iris.load_cube(self.fname) + warn.assert_has_calls([mock.call(expected_msg)]) + + # extra check to ensure correct variable was found + assert cube.standard_name == "ocean_volume" + + +class TestSelfReferencingVarLoad(tests.IrisTest): + def setUp(self): + self.temp_dir_path = os.path.join( + tempfile.mkdtemp(), "issue_3367_volcello_test_file.nc" + ) + dataset = _thread_safe_nc.DatasetWrapper(self.temp_dir_path, "w") + + dataset.createDimension("lat", 4) + dataset.createDimension("lon", 5) + dataset.createDimension("lev", 3) + + latitudes = dataset.createVariable("lat", np.float64, ("lat",)) + longitudes = dataset.createVariable("lon", np.float64, ("lon",)) + levels = dataset.createVariable("lev", np.float64, ("lev",)) + volcello = dataset.createVariable( + "volcello", np.float32, ("lat", "lon", "lev") + ) + + latitudes.standard_name = "latitude" + latitudes.units = "degrees_north" + latitudes.axis = "Y" + latitudes[:] = np.linspace(-90, 90, 4) + + longitudes.standard_name = "longitude" + longitudes.units = "degrees_east" + longitudes.axis = "X" + longitudes[:] = np.linspace(0, 360, 5) + + levels.standard_name = "olevel" + levels.units = "centimeters" + levels.positive = "down" + levels.axis = "Z" + levels[:] = np.linspace(0, 10**5, 3) + + volcello.id = "volcello" + volcello.out_name = "volcello" + volcello.standard_name = "ocean_volume" + volcello.units = "m3" + volcello.realm = "ocean" + volcello.frequency = "fx" + volcello.cell_measures = "area: areacello volume: volcello" + volcello = np.arange(4 * 5 * 3).reshape((4, 5, 3)) + + dataset.close() + + def test_self_referencing_load_issue_3367(self): + # Ensure that reading a file which references itself in + # `cell_measures` can be read. At the same time, ensure that we + # still receive a warning about other variables mentioned in + # `cell_measures` i.e. a warning should be raised about missing + # areacello. + areacello_str = "areacello" + volcello_str = "volcello" + expected_msg = ( + "Missing CF-netCDF measure variable %r, " + "referenced by netCDF variable %r" % (areacello_str, volcello_str) + ) + + with mock.patch("warnings.warn") as warn: + # ensure file loads without failure + cube = iris.load_cube(self.temp_dir_path) + warn.assert_called_with(expected_msg) + + # extra check to ensure correct variable was found + assert cube.standard_name == "ocean_volume" + + def tearDown(self): + os.remove(self.temp_dir_path) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py new file mode 100644 index 0000000000..5ed32d0671 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py @@ -0,0 +1,124 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Integration tests covering thread safety during loading/saving netcdf files. + +These tests are intended to catch non-thread-safe behaviour by producing CI +'irregularities' that are noticed and investigated. 
They cannot reliably
+produce standard pytest failures, since the tools for 'correctly'
+testing non-thread-safe behaviour are not available at the Python layer.
+Thread safety problems can either produce errors (like a normal test) OR
+segfaults (test doesn't complete, pytest-xdist starts a new group worker, the
+end exit code is still non-0), and some problems do not occur in every test
+run.
+
+Token assertions are included after the line that is expected to reveal
+a thread safety problem, as this seems to be good testing practice.
+
+"""
+from pathlib import Path
+
+import dask
+from dask import array as da
+import numpy as np
+import pytest
+
+import iris
+from iris.cube import Cube, CubeList
+from iris.tests import get_data_path
+
+
+@pytest.fixture
+def tiny_chunks():
+    """Guarantee that Dask will use >1 thread by guaranteeing >1 chunk."""
+
+    def _check_tiny_loaded_chunks(cube: Cube):
+        assert cube.has_lazy_data()
+        cube_lazy_data = cube.core_data()
+        assert np.product(cube_lazy_data.chunksize) < cube_lazy_data.size
+
+    with dask.config.set({"array.chunk-size": "1KiB"}):
+        yield _check_tiny_loaded_chunks
+
+
+@pytest.fixture
+def save_common(tmp_path):
+    save_path = tmp_path / "tmp.nc"
+
+    def _func(cube: Cube):
+        assert not save_path.exists()
+        iris.save(cube, save_path)
+        assert save_path.exists()
+
+    yield _func
+
+
+@pytest.fixture
+def get_cubes_from_netcdf():
+    load_dir_path = Path(get_data_path(["NetCDF", "global", "xyt"]))
+    loaded = iris.load(load_dir_path.glob("*"), "tcco2")
+    smaller = CubeList([c[0] for c in loaded])
+    yield smaller
+
+
+def test_realise_data(tiny_chunks, get_cubes_from_netcdf):
+    cube = get_cubes_from_netcdf[0]
+    tiny_chunks(cube)
+    _ = cube.data  # Any problems are expected here.
+    assert not cube.has_lazy_data()
+
+
+def test_realise_data_multisource(get_cubes_from_netcdf):
+    """Load from multiple sources to force Dask to use multiple threads."""
+    cubes = get_cubes_from_netcdf
+    final_cube = sum(cubes)
+    _ = final_cube.data  # Any problems are expected here.
+    assert not final_cube.has_lazy_data()
+
+
+def test_save(tiny_chunks, save_common):
+    cube = Cube(da.ones(10000))
+    tiny_chunks(cube)
+    save_common(cube)  # Any problems are expected here.
+
+
+def test_stream(tiny_chunks, get_cubes_from_netcdf, save_common):
+    cube = get_cubes_from_netcdf[0]
+    tiny_chunks(cube)
+    save_common(cube)  # Any problems are expected here.
+
+
+def test_stream_multisource(get_cubes_from_netcdf, save_common):
+    """Load from multiple sources to force Dask to use multiple threads."""
+    cubes = get_cubes_from_netcdf
+    final_cube = sum(cubes)
+    save_common(final_cube)  # Any problems are expected here.
+
+
+def test_stream_multisource__manychunks(
+    tiny_chunks, get_cubes_from_netcdf, save_common
+):
+    """
+    As above, but with many more small chunks.
+
+    This previously showed additional, sporadic problems which only emerge
+    (statistically) with larger numbers of chunks.
+
+    """
+    cubes = get_cubes_from_netcdf
+    final_cube = sum(cubes)
+    save_common(final_cube)  # Any problems are expected here.
+
+
+def test_comparison(get_cubes_from_netcdf):
+    """
+    Comparing multiple loaded files forces co-realisation.
+
+    See :func:`iris._lazy_data._co_realise_lazy_arrays`.
+    """
+    cubes = get_cubes_from_netcdf
+    _ = cubes[:-1] == cubes[1:]  # Any problems are expected here.
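# A small illustration (not part of the test suite) of the premise behind the
# `tiny_chunks` fixture above: capping Dask's chunk size ensures that even a
# modest array is split into several chunks, so realising or saving it can
# exercise more than one worker thread at once.
import dask
from dask import array as da

with dask.config.set({"array.chunk-size": "1KiB"}):
    lazy = da.ones(10000)  # the same shape used by test_save above
    # 10000 float64 values (~78 KiB) cannot fit in a single 1 KiB chunk.
    assert lazy.npartitions > 1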
+ assert all([c.has_lazy_data() for c in cubes]) diff --git a/lib/iris/tests/integration/test_Datums.py b/lib/iris/tests/integration/test_Datums.py index 6953534f2d..43287c7040 100755 --- a/lib/iris/tests/integration/test_Datums.py +++ b/lib/iris/tests/integration/test_Datums.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Integration tests for :class:`iris.coord_systems` datum suppport.""" +"""Integration tests for :class:`iris.coord_systems` datum support.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/integration/test_cube.py b/lib/iris/tests/integration/test_cube.py index 996362f594..ad6666d28e 100644 --- a/lib/iris/tests/integration/test_cube.py +++ b/lib/iris/tests/integration/test_cube.py @@ -9,6 +9,8 @@ # importing anything else. import iris.tests as tests # isort:skip +from unittest import mock + import numpy as np import iris @@ -23,7 +25,13 @@ def test_agg_by_aux_coord(self): problem_test_file = tests.get_data_path( ("NetCDF", "testing", "small_theta_colpex.nc") ) - cube = iris.load_cube(problem_test_file, "air_potential_temperature") + # While loading, "turn off" loading small variables as real data. + with mock.patch( + "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 + ): + cube = iris.load_cube( + problem_test_file, "air_potential_temperature" + ) # Test aggregating by aux coord, notably the `forecast_period` aux # coord on `cube`, whose `_points` attribute is a lazy array. diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py deleted file mode 100644 index 851c539ade..0000000000 --- a/lib/iris/tests/integration/test_netcdf.py +++ /dev/null @@ -1,958 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for loading and saving netcdf files.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from contextlib import contextmanager -from itertools import repeat -import os.path -from os.path import join as path_join -import shutil -import tempfile -from unittest import mock -import warnings - -import netCDF4 as nc -import numpy as np -import numpy.ma as ma -import pytest - -import iris -import iris.coord_systems -from iris.coords import CellMethod, DimCoord -from iris.cube import Cube, CubeList -import iris.exceptions -from iris.fileformats.netcdf import ( - CF_CONVENTIONS_VERSION, - Saver, - UnknownCellMethodWarning, -) -import iris.tests.stock as stock -from iris.tests.stock.netcdf import ncgen_from_cdl -import iris.tests.unit.fileformats.netcdf.test_load_cubes as tlc - - -@tests.skip_data -class TestAtmosphereSigma(tests.IrisTest): - def setUp(self): - # Modify stock cube so it is suitable to have a atmosphere sigma - # factory added to it. - cube = stock.realistic_4d_no_derived() - cube.coord("surface_altitude").rename("surface_air_pressure") - cube.coord("surface_air_pressure").units = "Pa" - cube.coord("sigma").units = "1" - ptop_coord = iris.coords.AuxCoord(1000.0, var_name="ptop", units="Pa") - cube.add_aux_coord(ptop_coord, ()) - cube.remove_coord("level_height") - # Construct and add atmosphere sigma factory. 
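# Aside (not part of the test code): the AtmosphereSigmaFactory constructed
# just below derives an "air_pressure" coordinate from the CF atmosphere sigma
# formula, p = ptop + sigma * (ps - ptop).  A worked value, assuming
# ptop = 1000 Pa, sigma = 0.5 and a surface pressure of 101325 Pa:
ptop, sigma, ps = 1000.0, 0.5, 101325.0
p = ptop + sigma * (ps - ptop)
assert round(p, 1) == 51162.5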
- factory = iris.aux_factory.AtmosphereSigmaFactory( - cube.coord("ptop"), - cube.coord("sigma"), - cube.coord("surface_air_pressure"), - ) - cube.add_aux_factory(factory) - self.cube = cube - - def test_save(self): - with self.temp_filename(suffix=".nc") as filename: - iris.save(self.cube, filename) - self.assertCDL(filename) - - def test_save_load_loop(self): - # Ensure that the AtmosphereSigmaFactory is automatically loaded - # when loading the file. - with self.temp_filename(suffix=".nc") as filename: - iris.save(self.cube, filename) - cube = iris.load_cube(filename, "air_potential_temperature") - assert cube.coords("air_pressure") - - -@tests.skip_data -class TestHybridPressure(tests.IrisTest): - def setUp(self): - # Modify stock cube so it is suitable to have a - # hybrid pressure factory added to it. - cube = stock.realistic_4d_no_derived() - cube.coord("surface_altitude").rename("surface_air_pressure") - cube.coord("surface_air_pressure").units = "Pa" - cube.coord("level_height").rename("level_pressure") - cube.coord("level_pressure").units = "Pa" - # Construct and add hybrid pressure factory. - factory = iris.aux_factory.HybridPressureFactory( - cube.coord("level_pressure"), - cube.coord("sigma"), - cube.coord("surface_air_pressure"), - ) - cube.add_aux_factory(factory) - self.cube = cube - - def test_save(self): - with self.temp_filename(suffix=".nc") as filename: - iris.save(self.cube, filename) - self.assertCDL(filename) - - def test_save_load_loop(self): - # Tests an issue where the variable names in the formula - # terms changed to the standard_names instead of the variable names - # when loading a previously saved cube. - with self.temp_filename(suffix=".nc") as filename, self.temp_filename( - suffix=".nc" - ) as other_filename: - iris.save(self.cube, filename) - cube = iris.load_cube(filename, "air_potential_temperature") - iris.save(cube, other_filename) - other_cube = iris.load_cube( - other_filename, "air_potential_temperature" - ) - self.assertEqual(cube, other_cube) - - -@tests.skip_data -class TestSaveMultipleAuxFactories(tests.IrisTest): - def test_hybrid_height_and_pressure(self): - cube = stock.realistic_4d() - cube.add_aux_coord( - iris.coords.DimCoord( - 1200.0, long_name="level_pressure", units="hPa" - ) - ) - cube.add_aux_coord( - iris.coords.DimCoord(0.5, long_name="other sigma", units="1") - ) - cube.add_aux_coord( - iris.coords.DimCoord( - 1000.0, long_name="surface_air_pressure", units="hPa" - ) - ) - factory = iris.aux_factory.HybridPressureFactory( - cube.coord("level_pressure"), - cube.coord("other sigma"), - cube.coord("surface_air_pressure"), - ) - cube.add_aux_factory(factory) - with self.temp_filename(suffix=".nc") as filename: - iris.save(cube, filename) - self.assertCDL(filename) - - def test_shared_primary(self): - cube = stock.realistic_4d() - factory = iris.aux_factory.HybridHeightFactory( - cube.coord("level_height"), - cube.coord("sigma"), - cube.coord("surface_altitude"), - ) - factory.rename("another altitude") - cube.add_aux_factory(factory) - with self.temp_filename( - suffix=".nc" - ) as filename, self.assertRaisesRegex( - ValueError, "multiple aux factories" - ): - iris.save(cube, filename) - - def test_hybrid_height_cubes(self): - hh1 = stock.simple_4d_with_hybrid_height() - hh1.attributes["cube"] = "hh1" - hh2 = stock.simple_4d_with_hybrid_height() - hh2.attributes["cube"] = "hh2" - sa = hh2.coord("surface_altitude") - sa.points = sa.points * 10 - with self.temp_filename(".nc") as fname: - iris.save([hh1, hh2], fname) - cubes = 
iris.load(fname, "air_temperature") - cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"]) - self.assertCML(cubes) - - def test_hybrid_height_cubes_on_dimension_coordinate(self): - hh1 = stock.hybrid_height() - hh2 = stock.hybrid_height() - sa = hh2.coord("surface_altitude") - sa.points = sa.points * 10 - emsg = "Unable to create dimensonless vertical coordinate." - with self.temp_filename(".nc") as fname, self.assertRaisesRegex( - ValueError, emsg - ): - iris.save([hh1, hh2], fname) - - -class TestUmVersionAttribute(tests.IrisTest): - def test_single_saves_as_global(self): - cube = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"um_version": "4.3"}, - ) - with self.temp_filename(".nc") as nc_path: - iris.save(cube, nc_path) - self.assertCDL(nc_path) - - def test_multiple_same_saves_as_global(self): - cube_a = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"um_version": "4.3"}, - ) - cube_b = Cube( - [1.0], - standard_name="air_pressure", - units="hPa", - attributes={"um_version": "4.3"}, - ) - with self.temp_filename(".nc") as nc_path: - iris.save(CubeList([cube_a, cube_b]), nc_path) - self.assertCDL(nc_path) - - def test_multiple_different_saves_on_variables(self): - cube_a = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"um_version": "4.3"}, - ) - cube_b = Cube( - [1.0], - standard_name="air_pressure", - units="hPa", - attributes={"um_version": "4.4"}, - ) - with self.temp_filename(".nc") as nc_path: - iris.save(CubeList([cube_a, cube_b]), nc_path) - self.assertCDL(nc_path) - - -@contextmanager -def _patch_site_configuration(): - def cf_patch_conventions(conventions): - return ", ".join([conventions, "convention1, convention2"]) - - def update(config): - config["cf_profile"] = mock.Mock(name="cf_profile") - config["cf_patch"] = mock.Mock(name="cf_patch") - config["cf_patch_conventions"] = cf_patch_conventions - - orig_site_config = iris.site_configuration.copy() - update(iris.site_configuration) - yield - iris.site_configuration = orig_site_config - - -class TestConventionsAttributes(tests.IrisTest): - def test_patching_conventions_attribute(self): - # Ensure that user defined conventions are wiped and those which are - # saved patched through site_config can be loaded without an exception - # being raised. - cube = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"Conventions": "some user defined conventions"}, - ) - - # Patch the site configuration dictionary. - with _patch_site_configuration(), self.temp_filename(".nc") as nc_path: - iris.save(cube, nc_path) - res = iris.load_cube(nc_path) - - self.assertEqual( - res.attributes["Conventions"], - "{}, {}, {}".format( - CF_CONVENTIONS_VERSION, "convention1", "convention2" - ), - ) - - -class TestLazySave(tests.IrisTest): - @tests.skip_data - def test_lazy_preserved_save(self): - fpath = tests.get_data_path( - ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc") - ) - acube = iris.load_cube(fpath, "air_temperature") - self.assertTrue(acube.has_lazy_data()) - # Also check a coord with lazy points + bounds. - self.assertTrue(acube.coord("forecast_period").has_lazy_points()) - self.assertTrue(acube.coord("forecast_period").has_lazy_bounds()) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(acube) - # Check that cube data is not realised, also coord points + bounds. 
- self.assertTrue(acube.has_lazy_data()) - self.assertTrue(acube.coord("forecast_period").has_lazy_points()) - self.assertTrue(acube.coord("forecast_period").has_lazy_bounds()) - - -@tests.skip_data -class TestCellMeasures(tests.IrisTest): - def setUp(self): - self.fname = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) - - def test_load_raw(self): - (cube,) = iris.load_raw(self.fname) - self.assertEqual(len(cube.cell_measures()), 1) - self.assertEqual(cube.cell_measures()[0].measure, "area") - - def test_load(self): - cube = iris.load_cube(self.fname) - self.assertEqual(len(cube.cell_measures()), 1) - self.assertEqual(cube.cell_measures()[0].measure, "area") - - def test_merge_cell_measure_aware(self): - (cube1,) = iris.load_raw(self.fname) - (cube2,) = iris.load_raw(self.fname) - cube2._cell_measures_and_dims[0][0].var_name = "not_areat" - cubes = CubeList([cube1, cube2]).merge() - self.assertEqual(len(cubes), 2) - - def test_concatenate_cell_measure_aware(self): - (cube1,) = iris.load_raw(self.fname) - cube1 = cube1[:, :, 0, 0] - cm_and_dims = cube1._cell_measures_and_dims - (cube2,) = iris.load_raw(self.fname) - cube2 = cube2[:, :, 0, 0] - cube2._cell_measures_and_dims[0][0].var_name = "not_areat" - cube2.coord("time").points = cube2.coord("time").points + 1 - cubes = CubeList([cube1, cube2]).concatenate() - self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims) - self.assertEqual(len(cubes), 2) - - def test_concatenate_cell_measure_match(self): - (cube1,) = iris.load_raw(self.fname) - cube1 = cube1[:, :, 0, 0] - cm_and_dims = cube1._cell_measures_and_dims - (cube2,) = iris.load_raw(self.fname) - cube2 = cube2[:, :, 0, 0] - cube2.coord("time").points = cube2.coord("time").points + 1 - cubes = CubeList([cube1, cube2]).concatenate() - self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims) - self.assertEqual(len(cubes), 1) - - def test_round_trip(self): - (cube,) = iris.load(self.fname) - with self.temp_filename(suffix=".nc") as filename: - iris.save(cube, filename, unlimited_dimensions=[]) - (round_cube,) = iris.load_raw(filename) - self.assertEqual(len(round_cube.cell_measures()), 1) - self.assertEqual(round_cube.cell_measures()[0].measure, "area") - - def test_print(self): - cube = iris.load_cube(self.fname) - printed = cube.__str__() - self.assertIn( - ( - "Cell measures:\n" - " cell_area - - " - " x x" - ), - printed, - ) - - -@tests.skip_data -class TestCMIP6VolcelloLoad(tests.IrisTest): - def setUp(self): - self.fname = tests.get_data_path( - ( - "NetCDF", - "volcello", - "volcello_Ofx_CESM2_deforest-globe_r1i1p1f1_gn.nc", - ) - ) - - def test_cmip6_volcello_load_issue_3367(self): - # Ensure that reading a file which references itself in - # `cell_measures` can be read. At the same time, ensure that we - # still receive a warning about other variables mentioned in - # `cell_measures` i.e. a warning should be raised about missing - # areacello. 
- areacello_str = "areacello" - volcello_str = "volcello" - expected_msg = ( - "Missing CF-netCDF measure variable %r, " - "referenced by netCDF variable %r" % (areacello_str, volcello_str) - ) - - with mock.patch("warnings.warn") as warn: - # ensure file loads without failure - cube = iris.load_cube(self.fname) - warn.assert_has_calls([mock.call(expected_msg)]) - - # extra check to ensure correct variable was found - assert cube.standard_name == "ocean_volume" - - -class TestSelfReferencingVarLoad(tests.IrisTest): - def setUp(self): - self.temp_dir_path = os.path.join( - tempfile.mkdtemp(), "issue_3367_volcello_test_file.nc" - ) - dataset = nc.Dataset(self.temp_dir_path, "w") - - dataset.createDimension("lat", 4) - dataset.createDimension("lon", 5) - dataset.createDimension("lev", 3) - - latitudes = dataset.createVariable("lat", np.float64, ("lat",)) - longitudes = dataset.createVariable("lon", np.float64, ("lon",)) - levels = dataset.createVariable("lev", np.float64, ("lev",)) - volcello = dataset.createVariable( - "volcello", np.float32, ("lat", "lon", "lev") - ) - - latitudes.standard_name = "latitude" - latitudes.units = "degrees_north" - latitudes.axis = "Y" - latitudes[:] = np.linspace(-90, 90, 4) - - longitudes.standard_name = "longitude" - longitudes.units = "degrees_east" - longitudes.axis = "X" - longitudes[:] = np.linspace(0, 360, 5) - - levels.standard_name = "olevel" - levels.units = "centimeters" - levels.positive = "down" - levels.axis = "Z" - levels[:] = np.linspace(0, 10**5, 3) - - volcello.id = "volcello" - volcello.out_name = "volcello" - volcello.standard_name = "ocean_volume" - volcello.units = "m3" - volcello.realm = "ocean" - volcello.frequency = "fx" - volcello.cell_measures = "area: areacello volume: volcello" - volcello = np.arange(4 * 5 * 3).reshape((4, 5, 3)) - - dataset.close() - - def test_self_referencing_load_issue_3367(self): - # Ensure that reading a file which references itself in - # `cell_measures` can be read. At the same time, ensure that we - # still receive a warning about other variables mentioned in - # `cell_measures` i.e. a warning should be raised about missing - # areacello. - areacello_str = "areacello" - volcello_str = "volcello" - expected_msg = ( - "Missing CF-netCDF measure variable %r, " - "referenced by netCDF variable %r" % (areacello_str, volcello_str) - ) - - with mock.patch("warnings.warn") as warn: - # ensure file loads without failure - cube = iris.load_cube(self.temp_dir_path) - warn.assert_called_with(expected_msg) - - # extra check to ensure correct variable was found - assert cube.standard_name == "ocean_volume" - - def tearDown(self): - os.remove(self.temp_dir_path) - - -class TestCellMethod_unknown(tests.IrisTest): - def test_unknown_method(self): - cube = Cube([1, 2], long_name="odd_phenomenon") - cube.add_cell_method(CellMethod(method="oddity", coords=("x",))) - temp_dirpath = tempfile.mkdtemp() - try: - temp_filepath = os.path.join(temp_dirpath, "tmp.nc") - iris.save(cube, temp_filepath) - with warnings.catch_warnings(record=True) as warning_records: - iris.load(temp_filepath) - # Filter to get the warning we are interested in. 
- warning_messages = [record.message for record in warning_records] - warning_messages = [ - warn - for warn in warning_messages - if isinstance(warn, UnknownCellMethodWarning) - ] - self.assertEqual(len(warning_messages), 1) - message = warning_messages[0].args[0] - msg = ( - "NetCDF variable 'odd_phenomenon' contains unknown cell " - "method 'oddity'" - ) - self.assertIn(msg, message) - finally: - shutil.rmtree(temp_dirpath) - - -@tests.skip_data -class TestCoordSystem(tests.IrisTest): - def setUp(self): - tlc.setUpModule() - - def tearDown(self): - tlc.tearDownModule() - - def test_load_laea_grid(self): - cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "lambert_azimuthal_equal_area", "euro_air_temp.nc") - ) - ) - self.assertCML(cube, ("netcdf", "netcdf_laea.cml")) - - datum_cf_var_cdl = """ - netcdf output { - dimensions: - y = 4 ; - x = 3 ; - variables: - float data(y, x) ; - data :standard_name = "toa_brightness_temperature" ; - data :units = "K" ; - data :grid_mapping = "mercator" ; - int mercator ; - mercator:grid_mapping_name = "mercator" ; - mercator:longitude_of_prime_meridian = 0. ; - mercator:earth_radius = 6378169. ; - mercator:horizontal_datum_name = "OSGB36" ; - float y(y) ; - y:axis = "Y" ; - y:units = "m" ; - y:standard_name = "projection_y_coordinate" ; - float x(x) ; - x:axis = "X" ; - x:units = "m" ; - x:standard_name = "projection_x_coordinate" ; - - // global attributes: - :Conventions = "CF-1.7" ; - :standard_name_vocabulary = "CF Standard Name Table v27" ; - - data: - - data = - 0, 1, 2, - 3, 4, 5, - 6, 7, 8, - 9, 10, 11 ; - - mercator = _ ; - - y = 1, 2, 3, 5 ; - - x = -6, -4, -2 ; - - } - """ - - datum_wkt_cdl = """ -netcdf output5 { -dimensions: - y = 4 ; - x = 3 ; -variables: - float data(y, x) ; - data :standard_name = "toa_brightness_temperature" ; - data :units = "K" ; - data :grid_mapping = "mercator" ; - int mercator ; - mercator:grid_mapping_name = "mercator" ; - mercator:longitude_of_prime_meridian = 0. ; - mercator:earth_radius = 6378169. ; - mercator:longitude_of_projection_origin = 0. ; - mercator:false_easting = 0. ; - mercator:false_northing = 0. ; - mercator:scale_factor_at_projection_origin = 1. 
; - mercator:crs_wkt = "PROJCRS[\\"unknown\\",BASEGEOGCRS[\\"unknown\\",DATUM[\\"OSGB36\\",ELLIPSOID[\\"unknown\\",6378169,0,LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]],PRIMEM[\\"Greenwich\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8901]]],CONVERSION[\\"unknown\\",METHOD[\\"Mercator (variant B)\\",ID[\\"EPSG\\",9805]],PARAMETER[\\"Latitude of 1st standard parallel\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8823]],PARAMETER[\\"Longitude of natural origin\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8802]],PARAMETER[\\"False easting\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8806]],PARAMETER[\\"False northing\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8807]]],CS[Cartesian,2],AXIS[\\"(E)\\",east,ORDER[1],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]],AXIS[\\"(N)\\",north,ORDER[2],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]]" ; - float y(y) ; - y:axis = "Y" ; - y:units = "m" ; - y:standard_name = "projection_y_coordinate" ; - float x(x) ; - x:axis = "X" ; - x:units = "m" ; - x:standard_name = "projection_x_coordinate" ; - -// global attributes: - :standard_name_vocabulary = "CF Standard Name Table v27" ; - :Conventions = "CF-1.7" ; -data: - - data = - 0, 1, 2, - 3, 4, 5, - 6, 7, 8, - 9, 10, 11 ; - - mercator = _ ; - - y = 1, 2, 3, 5 ; - - x = -6, -4, -2 ; -} - """ - - def test_load_datum_wkt(self): - expected = "OSGB 1936" - nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl) - with iris.FUTURE.context(datum_support=True): - cube = iris.load_cube(nc_path) - test_crs = cube.coord("projection_y_coordinate").coord_system - actual = str(test_crs.as_cartopy_crs().datum) - self.assertMultiLineEqual(expected, actual) - - def test_no_load_datum_wkt(self): - nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl) - with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"): - cube = iris.load_cube(nc_path) - test_crs = cube.coord("projection_y_coordinate").coord_system - actual = str(test_crs.as_cartopy_crs().datum) - self.assertMultiLineEqual(actual, "unknown") - - def test_load_datum_cf_var(self): - expected = "OSGB 1936" - nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl) - with iris.FUTURE.context(datum_support=True): - cube = iris.load_cube(nc_path) - test_crs = cube.coord("projection_y_coordinate").coord_system - actual = str(test_crs.as_cartopy_crs().datum) - self.assertMultiLineEqual(expected, actual) - - def test_no_load_datum_cf_var(self): - nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl) - with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"): - cube = iris.load_cube(nc_path) - test_crs = cube.coord("projection_y_coordinate").coord_system - actual = str(test_crs.as_cartopy_crs().datum) - self.assertMultiLineEqual(actual, "unknown") - - def test_save_datum(self): - expected = "OSGB 1936" - saved_crs = iris.coord_systems.Mercator( - ellipsoid=iris.coord_systems.GeogCS.from_datum("OSGB36") - ) - - base_cube = stock.realistic_3d() - base_lat_coord = base_cube.coord("grid_latitude") - test_lat_coord = DimCoord( - base_lat_coord.points, - standard_name="projection_y_coordinate", - coord_system=saved_crs, - ) - base_lon_coord = base_cube.coord("grid_longitude") - test_lon_coord = DimCoord( - base_lon_coord.points, - standard_name="projection_x_coordinate", - coord_system=saved_crs, - ) - test_cube = Cube( - base_cube.data, - standard_name=base_cube.standard_name, - units=base_cube.units, - dim_coords_and_dims=( - (base_cube.coord("time"), 0), - (test_lat_coord, 1), - (test_lon_coord, 2), - ), - ) - - with 
self.temp_filename(suffix=".nc") as filename: - iris.save(test_cube, filename) - with iris.FUTURE.context(datum_support=True): - cube = iris.load_cube(filename) - - test_crs = cube.coord("projection_y_coordinate").coord_system - actual = str(test_crs.as_cartopy_crs().datum) - self.assertMultiLineEqual(expected, actual) - - -def _get_scale_factor_add_offset(cube, datatype): - """Utility function used by netCDF data packing tests.""" - if isinstance(datatype, dict): - dt = np.dtype(datatype["dtype"]) - else: - dt = np.dtype(datatype) - cmax = cube.data.max() - cmin = cube.data.min() - n = dt.itemsize * 8 - if ma.isMaskedArray(cube.data): - masked = True - else: - masked = False - if masked: - scale_factor = (cmax - cmin) / (2**n - 2) - else: - scale_factor = (cmax - cmin) / (2**n - 1) - if dt.kind == "u": - add_offset = cmin - elif dt.kind == "i": - if masked: - add_offset = (cmax + cmin) / 2 - else: - add_offset = cmin + 2 ** (n - 1) * scale_factor - return (scale_factor, add_offset) - - -@tests.skip_data -class TestPackedData(tests.IrisTest): - def _single_test(self, datatype, CDLfilename, manual=False): - # Read PP input file. - file_in = tests.get_data_path( - ( - "PP", - "cf_processing", - "000003000000.03.236.000128.1990.12.01.00.00.b.pp", - ) - ) - cube = iris.load_cube(file_in) - scale_factor, offset = _get_scale_factor_add_offset(cube, datatype) - if manual: - packspec = dict( - dtype=datatype, scale_factor=scale_factor, add_offset=offset - ) - else: - packspec = datatype - # Write Cube to netCDF file. - with self.temp_filename(suffix=".nc") as file_out: - iris.save(cube, file_out, packing=packspec) - decimal = int(-np.log10(scale_factor)) - packedcube = iris.load_cube(file_out) - # Check that packed cube is accurate to expected precision - self.assertArrayAlmostEqual( - cube.data, packedcube.data, decimal=decimal - ) - # Check the netCDF file against CDL expected output. - self.assertCDL( - file_out, - ("integration", "netcdf", "TestPackedData", CDLfilename), - ) - - def test_single_packed_signed(self): - """Test saving a single CF-netCDF file with packing.""" - self._single_test("i2", "single_packed_signed.cdl") - - def test_single_packed_unsigned(self): - """Test saving a single CF-netCDF file with packing into unsigned.""" - self._single_test("u1", "single_packed_unsigned.cdl") - - def test_single_packed_manual_scale(self): - """Test saving a single CF-netCDF file with packing with scale - factor and add_offset set manually.""" - self._single_test("i2", "single_packed_manual.cdl", manual=True) - - def _multi_test(self, CDLfilename, multi_dtype=False): - """Test saving multiple packed cubes with pack_dtype list.""" - # Read PP input file. - file_in = tests.get_data_path( - ("PP", "cf_processing", "abcza_pa19591997_daily_29.b.pp") - ) - cubes = iris.load(file_in) - # ensure cube order is the same: - cubes.sort(key=lambda cube: cube.cell_methods[0].method) - datatype = "i2" - scale_factor, offset = _get_scale_factor_add_offset(cubes[0], datatype) - if multi_dtype: - packdict = dict( - dtype=datatype, scale_factor=scale_factor, add_offset=offset - ) - packspec = [packdict, None, "u2"] - dtypes = packspec - else: - packspec = datatype - dtypes = repeat(packspec) - - # Write Cube to netCDF file. - with self.temp_filename(suffix=".nc") as file_out: - iris.save(cubes, file_out, packing=packspec) - # Check the netCDF file against CDL expected output. 
- self.assertCDL( - file_out, - ("integration", "netcdf", "TestPackedData", CDLfilename), - ) - packedcubes = iris.load(file_out) - packedcubes.sort(key=lambda cube: cube.cell_methods[0].method) - for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes): - if dtype: - sf, ao = _get_scale_factor_add_offset(cube, dtype) - decimal = int(-np.log10(sf)) - # Check that packed cube is accurate to expected precision - self.assertArrayAlmostEqual( - cube.data, packedcube.data, decimal=decimal - ) - else: - self.assertArrayEqual(cube.data, packedcube.data) - - def test_multi_packed_single_dtype(self): - """Test saving multiple packed cubes with the same pack_dtype.""" - # Read PP input file. - self._multi_test("multi_packed_single_dtype.cdl") - - def test_multi_packed_multi_dtype(self): - """Test saving multiple packed cubes with pack_dtype list.""" - # Read PP input file. - self._multi_test("multi_packed_multi_dtype.cdl", multi_dtype=True) - - -class TestScalarCube(tests.IrisTest): - def test_scalar_cube_save_load(self): - cube = iris.cube.Cube(1, long_name="scalar_cube") - with self.temp_filename(suffix=".nc") as fout: - iris.save(cube, fout) - scalar_cube = iris.load_cube(fout) - self.assertEqual(scalar_cube.name(), "scalar_cube") - - -class TestStandardName(tests.IrisTest): - def test_standard_name_roundtrip(self): - standard_name = "air_temperature detection_minimum" - cube = iris.cube.Cube(1, standard_name=standard_name) - with self.temp_filename(suffix=".nc") as fout: - iris.save(cube, fout) - detection_limit_cube = iris.load_cube(fout) - self.assertEqual(detection_limit_cube.standard_name, standard_name) - - -class TestLoadMinimalGeostationary(tests.IrisTest): - """ - Check we can load data with a geostationary grid-mapping, even when the - 'false-easting' and 'false_northing' properties are missing. - - """ - - _geostationary_problem_cdl = """ -netcdf geostationary_problem_case { -dimensions: - y = 2 ; - x = 3 ; -variables: - short radiance(y, x) ; - radiance:standard_name = "toa_outgoing_radiance_per_unit_wavelength" ; - radiance:units = "W m-2 sr-1 um-1" ; - radiance:coordinates = "y x" ; - radiance:grid_mapping = "imager_grid_mapping" ; - short y(y) ; - y:units = "rad" ; - y:axis = "Y" ; - y:long_name = "fixed grid projection y-coordinate" ; - y:standard_name = "projection_y_coordinate" ; - short x(x) ; - x:units = "rad" ; - x:axis = "X" ; - x:long_name = "fixed grid projection x-coordinate" ; - x:standard_name = "projection_x_coordinate" ; - int imager_grid_mapping ; - imager_grid_mapping:grid_mapping_name = "geostationary" ; - imager_grid_mapping:perspective_point_height = 35786023. ; - imager_grid_mapping:semi_major_axis = 6378137. ; - imager_grid_mapping:semi_minor_axis = 6356752.31414 ; - imager_grid_mapping:latitude_of_projection_origin = 0. ; - imager_grid_mapping:longitude_of_projection_origin = -75. ; - imager_grid_mapping:sweep_angle_axis = "x" ; - -data: - - // coord values, just so these can be dim-coords - y = 0, 1 ; - x = 0, 1, 2 ; - -} -""" - - @classmethod - def setUpClass(cls): - # Create a temp directory for transient test files. - cls.temp_dir = tempfile.mkdtemp() - cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl") - cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc") - # Create reference CDL and netcdf files from the CDL text. - ncgen_from_cdl( - cdl_str=cls._geostationary_problem_cdl, - cdl_path=cls.path_test_cdl, - nc_path=cls.path_test_nc, - ) - - @classmethod - def tearDownClass(cls): - # Destroy the temp directory. 
- shutil.rmtree(cls.temp_dir) - - def test_geostationary_no_false_offsets(self): - # Check we can load the test data and coordinate system properties are correct. - cube = iris.load_cube(self.path_test_nc) - # Check the coordinate system properties has the correct default properties. - cs = cube.coord_system() - self.assertIsInstance(cs, iris.coord_systems.Geostationary) - self.assertEqual(cs.false_easting, 0.0) - self.assertEqual(cs.false_northing, 0.0) - - -@tests.skip_data -class TestConstrainedLoad(tests.IrisTest): - filename = tests.get_data_path( - ("NetCDF", "label_and_climate", "A1B-99999a-river-sep-2070-2099.nc") - ) - - def test_netcdf_with_NameConstraint(self): - constr = iris.NameConstraint(var_name="cdf_temp_dmax_tmean_abs") - cubes = iris.load(self.filename, constr) - self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].var_name, "cdf_temp_dmax_tmean_abs") - - def test_netcdf_with_no_constraint(self): - cubes = iris.load(self.filename) - self.assertEqual(len(cubes), 3) - - -class TestSkippedCoord: - # If a coord/cell measure/etcetera cannot be added to the loaded Cube, a - # Warning is raised and the coord is skipped. - # This 'catching' is generic to all CannotAddErrors, but currently the only - # such problem that can exist in a NetCDF file is a mismatch of dimensions - # between phenomenon and coord. - - cdl_core = """ -dimensions: - length_scale = 1 ; - lat = 3 ; -variables: - float lat(lat) ; - lat:standard_name = "latitude" ; - lat:units = "degrees_north" ; - short lst_unc_sys(length_scale) ; - lst_unc_sys:long_name = "uncertainty from large-scale systematic - errors" ; - lst_unc_sys:units = "kelvin" ; - lst_unc_sys:coordinates = "lat" ; - -data: - lat = 0, 1, 2; - """ - - @pytest.fixture(autouse=True) - def create_nc_file(self, tmp_path): - file_name = "dim_mismatch" - cdl = f"netcdf {file_name}" + "{\n" + self.cdl_core + "\n}" - self.nc_path = (tmp_path / file_name).with_suffix(".nc") - ncgen_from_cdl( - cdl_str=cdl, - cdl_path=None, - nc_path=str(self.nc_path), - ) - yield - self.nc_path.unlink() - - def test_lat_not_loaded(self): - # iris#5068 includes discussion of possible retention of the skipped - # coords in the future. 
- with pytest.warns( - match="Missing data dimensions for multi-valued DimCoord" - ): - cube = iris.load_cube(self.nc_path) - with pytest.raises(iris.exceptions.CoordinateNotFoundError): - _ = cube.coord("lat") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml index da315c36af..07bdb02725 100644 --- a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml +++ b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml @@ -8,506 +8,531 @@ - + - + - + - + - + - + - + - + - + @@ -516,8 +541,9 @@ - + @@ -531,506 +557,531 @@ - + - + - + - + - + - + - + - + - + @@ -1039,8 +1090,9 @@ - + @@ -1054,56 +1106,70 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/FF/air_temperature_1.cml b/lib/iris/tests/results/FF/air_temperature_1.cml index 043b9acc16..99c30075a0 100644 --- a/lib/iris/tests/results/FF/air_temperature_1.cml +++ b/lib/iris/tests/results/FF/air_temperature_1.cml @@ -8,10 +8,10 @@ - + - + @@ -21,26 +21,28 @@ - + - + - + diff --git a/lib/iris/tests/results/FF/air_temperature_2.cml b/lib/iris/tests/results/FF/air_temperature_2.cml index 200a80b54a..c94604b516 100644 --- a/lib/iris/tests/results/FF/air_temperature_2.cml +++ b/lib/iris/tests/results/FF/air_temperature_2.cml @@ -8,10 +8,10 @@ - + - + @@ -21,26 +21,28 @@ - + - + - + diff --git a/lib/iris/tests/results/FF/soil_temperature_1.cml b/lib/iris/tests/results/FF/soil_temperature_1.cml index 57303636c1..e014ac6b6f 100644 --- a/lib/iris/tests/results/FF/soil_temperature_1.cml +++ b/lib/iris/tests/results/FF/soil_temperature_1.cml @@ -8,27 +8,29 @@ - + - + - + - + @@ -40,7 +42,7 @@ - + diff --git a/lib/iris/tests/results/FF/surface_altitude_1.cml b/lib/iris/tests/results/FF/surface_altitude_1.cml index 2669624d37..e64c146e1a 100644 --- a/lib/iris/tests/results/FF/surface_altitude_1.cml +++ b/lib/iris/tests/results/FF/surface_altitude_1.cml @@ -8,32 +8,34 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt b/lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt new file mode 100644 index 0000000000..9e1bfa95bf --- /dev/null +++ b/lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt @@ -0,0 +1,641 @@ +[PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27870 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 56 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 5 + lblev: 1 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 897024, 0, 2, 0, 0, 1) + brsvd: (20.000338, 0.9977165, 0.0, 0.0) + bdatum: 0.0 + bacc: -12.0 + blev: 9.998206 + brlev: 0.0 + bhlev: 0.99885815 + bhrlev: 1.0 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -0.9375 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[ 0.8562012 0.9094238 0.9614258 ... 0.6916504 0.74731445 + 0.8022461 ] + [-0.29174805 -0.2397461 -0.18725586 ... -0.36645508 -0.34594727 + -0.32763672] + [-0.76000977 -0.6833496 -0.6347656 ... -0.9243164 -0.8911133 + -0.7675781 ] + ... + [-4.647461 -4.7456055 -4.8171387 ... -4.3222656 -4.428955 + -4.536133 ] + [-4.4577637 -4.5183105 -4.580078 ... -4.283203 -4.350342 + -4.4038086 ] + [-4.2226562 -4.284668 -4.342041 ... 
-4.01001 -4.085205 + -4.15625 ]] + field_title: AJHQA Time mean !C Atmos u compnt of wind after timestep at 9.998 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27678 + lbcode: 1 + lbhem: 0 + lbrow: 144 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 57 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 6 + lblev: 1 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1208320, 0, 3, 0, 0, 1) + brsvd: (20.000338, 0.9977165, 0.0, 0.0) + bdatum: 0.0 + bacc: -12.0 + blev: 9.998206 + brlev: 0.0 + bhlev: 0.99885815 + bhrlev: 1.0 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -90.625 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[-1.2304688 -1.2202148 -1.2077637 ... -1.2546387 -1.246582 + -1.2387695 ] + [-1.0026855 -1.0119629 -1.0195312 ... -0.9663086 -0.9802246 + -0.9904785 ] + [-0.76538086 -0.8845215 -1.0141602 ... -0.72143555 -0.7011719 + -0.71118164] + ... + [-2.1013184 -1.9470215 -1.7893066 ... -2.564209 -2.4177246 + -2.2590332 ] + [-2.0922852 -1.9360352 -1.7756348 ... -2.5288086 -2.3864746 + -2.2421875 ] + [-2.0959473 -1.9523926 -1.8071289 ... -2.5092773 -2.3747559 + -2.2368164 ]] + field_title: AJHQA Time mean !C Atmos v compnt of wind after timestep at 9.998 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27867 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 27 + lbpack: 0 + lbrel: 2 + lbfc: 19 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 1 + lblev: 1 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1519616, 0, 4, 0, 0, 1) + brsvd: (49.998882, 0.99429625, 0.0, 0.0) + bdatum: 0.0 + bacc: -10.0 + blev: 20.000338 + brlev: 0.0 + bhlev: 0.9977165 + bhrlev: 1.0 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[282.4619 282.4619 282.4619 ... 282.4619 282.4619 282.4619 ] + [282.3506 282.37598 282.40234 ... 282.27344 282.29883 282.3252 ] + [281.95508 282.03418 282.10938 ... 281.7578 281.81348 281.87988] + ... + [245.83203 245.84277 245.83398 ... 245.82031 245.82129 245.82324] + [244.42969 244.4248 244.42383 ... 244.45312 244.45215 244.44043] + [243.26758 243.26758 243.26758 ... 
243.26758 243.26758 243.26758]] + field_title: AJHQA Time mean !C Atmos theta after timestep at 20.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27870 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 95 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 13 + lblev: 1 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1789952, 0, 10, 0, 0, 1) + brsvd: (49.998882, 0.99429625, 0.0, 0.0) + bdatum: 0.0 + bacc: -99.0 + blev: 20.000338 + brlev: 0.0 + bhlev: 0.9977165 + bhrlev: 1.0 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[0.00079939 0.00079939 0.00079939 ... 0.00079939 0.00079939 0.00079939] + [0.00087261 0.00087106 0.00086934 ... 0.00087724 0.00087613 0.00087428] + [0.00093523 0.00092579 0.00091752 ... 0.00095657 0.00094989 0.00094373] + ... + [0.00037911 0.0003811 0.00038037 ... 0.00037897 0.00037865 0.0003793 ] + [0.00033554 0.0003354 0.00033541 ... 0.0003389 0.00033855 0.00033566] + [0.00030907 0.00030907 0.00030907 ... 0.00030907 0.00030907 0.00030907]] + field_title: AJHQA Time mean !C Atmos specific humidity after timestep at 20.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27870 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 56 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 5 + lblev: 2 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 905216, 0, 2, 0, 0, 1) + brsvd: (80.00135, 0.9908815, 0.0, 0.0) + bdatum: 0.0 + bacc: -12.0 + blev: 49.998882 + brlev: 20.000338 + bhlev: 0.99429625 + bhrlev: 0.9977165 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -0.9375 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[ 1.0332031 1.0991211 1.1638184 ... 0.82910156 0.89819336 + 0.96606445] + [-0.46777344 -0.41455078 -0.35766602 ... -0.5932617 -0.5517578 + -0.51293945] + [-1.072998 -1.005127 -0.9387207 ... -1.3034668 -1.2263184 + -1.1523438 ] + ... + [-5.9941406 -6.099365 -6.1816406 ... -5.6379395 -5.7575684 + -5.8745117 ] + [-5.8913574 -5.9609375 -6.027832 ... -5.675537 -5.7558594 + -5.8239746 ] + [-5.727051 -5.7910156 -5.848633 ... 
-5.4992676 -5.581299 + -5.6572266 ]] + field_title: AJHQA Time mean !C Atmos u compnt of wind after timestep at 50.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27678 + lbcode: 1 + lbhem: 0 + lbrow: 144 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 57 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 6 + lblev: 2 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1216512, 0, 3, 0, 0, 1) + brsvd: (80.00135, 0.9908815, 0.0, 0.0) + bdatum: 0.0 + bacc: -12.0 + blev: 49.998882 + brlev: 20.000338 + bhlev: 0.99429625 + bhrlev: 0.9977165 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -90.625 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[-1.5361328 -1.5249023 -1.5117188 ... -1.5610352 -1.5537109 + -1.5454102 ] + [-1.2714844 -1.2890625 -1.3078613 ... -1.2194824 -1.2355957 + -1.2526855 ] + [-1.0349121 -1.1855469 -1.3476562 ... -0.96240234 -0.94018555 + -0.9621582 ] + ... + [-2.333252 -2.1430664 -1.9562988 ... -2.888916 -2.708252 + -2.5219727 ] + [-2.2441406 -2.0427246 -1.8383789 ... -2.8112793 -2.6252441 + -2.4382324 ] + [-2.1965332 -2.0041504 -1.809082 ... -2.755127 -2.5720215 + -2.3859863 ]] + field_title: AJHQA Time mean !C Atmos v compnt of wind after timestep at 50.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27867 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 27 + lbpack: 0 + lbrel: 2 + lbfc: 19 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 1 + lblev: 2 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1527808, 0, 4, 0, 0, 1) + brsvd: (130.00023, 0.98520386, 0.0, 0.0) + bdatum: 0.0 + bacc: -10.0 + blev: 80.00135 + brlev: 49.998882 + bhlev: 0.9908815 + bhrlev: 0.99429625 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[282.4961 282.4961 282.4961 ... 282.4961 282.4961 282.4961 ] + [282.38672 282.4121 282.4375 ... 282.31152 282.33594 282.36133] + [282.0957 282.16992 282.2422 ... 281.9121 281.96582 282.02734] + ... + [246.62598 246.63086 246.625 ... 246.59863 246.60938 246.61816] + [245.46387 245.46582 245.4707 ... 245.45703 245.46191 245.46387] + [244.5625 244.5625 244.5625 ... 
244.5625 244.5625 244.5625 ]] + field_title: AJHQA Time mean !C Atmos theta after timestep at 80.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27870 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 95 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 13 + lblev: 2 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1818624, 0, 10, 0, 0, 1) + brsvd: (130.00023, 0.98520386, 0.0, 0.0) + bdatum: 0.0 + bacc: -99.0 + blev: 80.00135 + brlev: 49.998882 + bhlev: 0.9908815 + bhrlev: 0.99429625 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[0.00077913 0.00077913 0.00077913 ... 0.00077913 0.00077913 0.00077913] + [0.00085118 0.0008495 0.00084755 ... 0.00085498 0.00085392 0.00085248] + [0.00091165 0.00090317 0.00089486 ... 0.00092995 0.00092435 0.00091926] + ... + [0.00038609 0.00038648 0.00038594 ... 0.00038624 0.00038616 0.00038646] + [0.00034904 0.00034909 0.0003492 ... 0.0003502 0.00035007 0.00034903] + [0.00032891 0.00032891 0.00032891 ... 0.00032891 0.00032891 0.00032891]] + field_title: AJHQA Time mean !C Atmos specific humidity after timestep at 80.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27870 + lbcode: 1 + lbhem: 0 + lbrow: 145 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 56 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 5 + lblev: 3 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 913408, 0, 2, 0, 0, 1) + brsvd: (179.99911, 0.97954255, 0.0, 0.0) + bdatum: 0.0 + bacc: -12.0 + blev: 130.00023 + brlev: 80.00135 + bhlev: 0.98520386 + bhrlev: 0.9908815 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -91.25 + bdy: 1.25 + bzx: -0.9375 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[ 1.0524902 1.1252441 1.1967773 ... 0.8273926 0.90356445 + 0.9785156 ] + [-0.6694336 -0.61328125 -0.5529785 ... -0.8195801 -0.7685547 + -0.72021484] + [-1.3225098 -1.2358398 -1.1459961 ... -1.5771484 -1.4953613 + -1.4130859 ] + ... + [-6.96875 -7.027832 -7.0776367 ... -6.741455 -6.8256836 + -6.900879 ] + [-7.010498 -7.0480957 -7.0776367 ... -6.8447266 -6.9067383 + -6.963135 ] + [-6.9716797 -7.010254 -7.04126 ... 
-6.8120117 -6.8725586 + -6.9257812 ]] + field_title: AJHQA Time mean !C Atmos u compnt of wind after timestep at 130.0 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +, PP Field + lbyr: 2007 + lbmon: 12 + lbdat: 1 + lbhr: 0 + lbmin: 0 + lbday: 336 + lbyrd: 2008 + lbmond: 1 + lbdatd: 1 + lbhrd: 0 + lbmind: 0 + lbdayd: 1 + lbtim: 121 + lbft: 26280 + lblrec: 27678 + lbcode: 1 + lbhem: 0 + lbrow: 144 + lbnpt: 192 + lbext: 30 + lbpack: 0 + lbrel: 2 + lbfc: 57 + lbcfc: 0 + lbproc: 128 + lbvc: 65 + lbrvc: 0 + lbexp: 2388992 + lbegin: 0 + lbnrec: 0 + lbproj: 802 + lbtyp: 6 + lblev: 3 + lbrsvd: (0, 0, 0, 0) + lbsrce: 6061111 + lbuser: (1, 1224704, 0, 3, 0, 0, 1) + brsvd: (179.99911, 0.97954255, 0.0, 0.0) + bdatum: 0.0 + bacc: -12.0 + blev: 130.00023 + brlev: 80.00135 + bhlev: 0.98520386 + bhrlev: 0.9908815 + bplat: 90.0 + bplon: 0.0 + bgor: 0.0 + bzy: -90.625 + bdy: 1.25 + bzx: -1.875 + bdx: 1.875 + bmdi: -1073741800.0 + bmks: 1.0 + data: [[-1.7414551 -1.7321777 -1.7211914 ... -1.7590332 -1.7546387 -1.7485352] + [-1.482666 -1.5065918 -1.5327148 ... -1.4162598 -1.4372559 -1.4589844] + [-1.3601074 -1.5227051 -1.6989746 ... -1.2714844 -1.2514648 -1.2753906] + ... + [-1.7216797 -1.4643555 -1.2097168 ... -2.4348145 -2.1984863 -1.9648438] + [-1.529541 -1.295166 -1.0639648 ... -2.2402344 -2.0048828 -1.7670898] + [-1.4748535 -1.2502441 -1.0231934 ... -2.137207 -1.9177246 -1.6970215]] + field_title: AJHQA Time mean !C Atmos v compnt of wind after timestep at 130.0 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 +] \ No newline at end of file diff --git a/lib/iris/tests/results/abf/load.cml b/lib/iris/tests/results/abf/load.cml index e7954ab229..bf15e4499c 100644 --- a/lib/iris/tests/results/abf/load.cml +++ b/lib/iris/tests/results/abf/load.cml @@ -6,26 +6,26 @@ - + - + diff --git a/lib/iris/tests/results/analysis/abs.cml b/lib/iris/tests/results/analysis/abs.cml index b0a37b6074..524e05a09a 100644 --- a/lib/iris/tests/results/analysis/abs.cml +++ b/lib/iris/tests/results/analysis/abs.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition.cml b/lib/iris/tests/results/analysis/addition.cml index 4f9600694d..a0f4db9e58 100644 --- a/lib/iris/tests/results/analysis/addition.cml +++ b/lib/iris/tests/results/analysis/addition.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition_coord_x.cml b/lib/iris/tests/results/analysis/addition_coord_x.cml index a086b8ad8b..4259c2d621 100644 --- a/lib/iris/tests/results/analysis/addition_coord_x.cml +++ b/lib/iris/tests/results/analysis/addition_coord_x.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition_coord_y.cml b/lib/iris/tests/results/analysis/addition_coord_y.cml index 266e81c912..7b11e214fe 100644 --- a/lib/iris/tests/results/analysis/addition_coord_y.cml +++ b/lib/iris/tests/results/analysis/addition_coord_y.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition_different_std_name.cml b/lib/iris/tests/results/analysis/addition_different_std_name.cml index 14b0b42dd8..b137858af8 100644 --- a/lib/iris/tests/results/analysis/addition_different_std_name.cml +++ b/lib/iris/tests/results/analysis/addition_different_std_name.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition_in_place.cml b/lib/iris/tests/results/analysis/addition_in_place.cml index 4f9600694d..a0f4db9e58 100644 --- a/lib/iris/tests/results/analysis/addition_in_place.cml +++ 
b/lib/iris/tests/results/analysis/addition_in_place.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition_in_place_coord.cml b/lib/iris/tests/results/analysis/addition_in_place_coord.cml index 00dee609eb..8559128b63 100644 --- a/lib/iris/tests/results/analysis/addition_in_place_coord.cml +++ b/lib/iris/tests/results/analysis/addition_in_place_coord.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/addition_scalar.cml b/lib/iris/tests/results/analysis/addition_scalar.cml index daf0050069..69853fa215 100644 --- a/lib/iris/tests/results/analysis/addition_scalar.cml +++ b/lib/iris/tests/results/analysis/addition_scalar.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/easy.cml b/lib/iris/tests/results/analysis/aggregated_by/easy.cml index d02c3f12d1..87b10a52cd 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/easy.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/easy.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/multi.cml b/lib/iris/tests/results/analysis/aggregated_by/multi.cml index 75cb67c054..6542b915a1 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/multi.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/multi.cml @@ -6,28 +6,29 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml index dc9bdd0df8..1558d17a9a 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml @@ -6,28 +6,29 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/multi_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/multi_shared.cml index 81d775e741..aa6fefc293 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/multi_shared.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/multi_shared.cml @@ -4,52 +4,55 @@ + [16, 15], + [14, 0], + [13, 11], + [10, 10], + [ 9, 8], + [ 7, 5], + [ 4, 4], + [ 3, 2]]" id="35dc92ed" long_name="gamma" points="[18. , 15.5, 7. , 12. , 10. , 8.5, 6. , 4. 
, + 2.5]" shape="(9,)" units="Unit('1')" value_type="float64"/> - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single.cml b/lib/iris/tests/results/analysis/aggregated_by/single.cml index 3f2ea6fce2..bc6cbd0301 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single.cml @@ -6,24 +6,24 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml index 51e1ae4ff1..df1a9861d4 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml @@ -6,24 +6,24 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single_rms.cml b/lib/iris/tests/results/analysis/aggregated_by/single_rms.cml index 2961a6b48d..34bd38240e 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single_rms.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single_rms.cml @@ -6,24 +6,24 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/single_shared.cml index adbf893864..a554a1083d 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single_shared.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single_shared.cml @@ -6,34 +6,34 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single_shared_circular.cml b/lib/iris/tests/results/analysis/aggregated_by/single_shared_circular.cml index eba017837d..ec1d9b5780 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single_shared_circular.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single_shared_circular.cml @@ -3,38 +3,37 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml index 8c434479c9..f6ffc02b55 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml index cca744ff87..78703b47eb 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml @@ -6,28 +6,29 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml index 8c11bdb505..120084b030 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml @@ -6,28 +6,29 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml index ab7a7195fd..1758cf3791 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml @@ -4,52 +4,55 @@ + [16, 15], + [14, 0], + [13, 11], + [10, 10], + [ 9, 8], + [ 7, 5], + [ 4, 4], + [ 3, 2]]" id="35dc92ed" long_name="gamma" points="[18. , 15.5, 7. , 12. , 10. , 8.5, 6. , 4. 
, + 2.5]" shape="(9,)" units="Unit('1')" value_type="float64"/> - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml index d5bb9775fe..96a7e4ec85 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml @@ -6,24 +6,24 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml index f7d57a9828..8d11643346 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml @@ -6,24 +6,24 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml index 50a2c44a98..dad52ae602 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml @@ -6,34 +6,34 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml index 657fb43414..e371728745 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml @@ -3,38 +3,37 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/apply_ifunc.cml b/lib/iris/tests/results/analysis/apply_ifunc.cml index fe0e394ee6..e2f5658832 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml b/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml index 29cb6f611e..d3405f401f 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/apply_ifunc_original.cml b/lib/iris/tests/results/analysis/apply_ifunc_original.cml index 62a569f7cc..b01e2134af 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc_original.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc_original.cml @@ -7,36 +7,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/apply_ufunc.cml b/lib/iris/tests/results/analysis/apply_ufunc.cml index fe0e394ee6..e2f5658832 100644 --- a/lib/iris/tests/results/analysis/apply_ufunc.cml +++ b/lib/iris/tests/results/analysis/apply_ufunc.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml b/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml index 7b1511f028..670f74a9ba 100644 --- a/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml +++ b/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/apply_ufunc_original.cml b/lib/iris/tests/results/analysis/apply_ufunc_original.cml index 62a569f7cc..b01e2134af 100644 --- a/lib/iris/tests/results/analysis/apply_ufunc_original.cml +++ b/lib/iris/tests/results/analysis/apply_ufunc_original.cml @@ -7,36 
+7,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/areaweights_original.cml b/lib/iris/tests/results/analysis/areaweights_original.cml index 651bb648dd..dab90dcfd5 100644 --- a/lib/iris/tests/results/analysis/areaweights_original.cml +++ b/lib/iris/tests/results/analysis/areaweights_original.cml @@ -7,26 +7,27 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/cos_simple.xml b/lib/iris/tests/results/analysis/calculus/cos_simple.xml index 478902833f..2b624df1c4 100644 --- a/lib/iris/tests/results/analysis/calculus/cos_simple.xml +++ b/lib/iris/tests/results/analysis/calculus/cos_simple.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/cos_simple_radians.xml b/lib/iris/tests/results/analysis/calculus/cos_simple_radians.xml index 478902833f..2b624df1c4 100644 --- a/lib/iris/tests/results/analysis/calculus/cos_simple_radians.xml +++ b/lib/iris/tests/results/analysis/calculus/cos_simple_radians.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/curl_contrived_cartesian2.cml b/lib/iris/tests/results/analysis/calculus/curl_contrived_cartesian2.cml index a744dfc782..96ea1ecc60 100644 --- a/lib/iris/tests/results/analysis/calculus/curl_contrived_cartesian2.cml +++ b/lib/iris/tests/results/analysis/calculus/curl_contrived_cartesian2.cml @@ -3,25 +3,28 @@ - + - + - + @@ -34,25 +37,28 @@ - + - + - + @@ -65,25 +71,28 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/delta_handmade_simple_wrt_x.cml b/lib/iris/tests/results/analysis/calculus/delta_handmade_simple_wrt_x.cml index ee1301b11d..b4f065084b 100644 --- a/lib/iris/tests/results/analysis/calculus/delta_handmade_simple_wrt_x.cml +++ b/lib/iris/tests/results/analysis/calculus/delta_handmade_simple_wrt_x.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lat.cml b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lat.cml index 0693498989..86f407a6f2 100644 --- a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lat.cml +++ b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lat.cml @@ -3,17 +3,17 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lon.cml b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lon.cml index 376c624265..5b624bf398 100644 --- a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lon.cml +++ b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_lon.cml @@ -3,12 +3,12 @@ - + - + @@ -16,7 +16,7 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_x.cml b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_x.cml index d54dae3424..30441d8a56 100644 --- a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_x.cml +++ b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_x.cml @@ -3,12 +3,12 @@ - + - + @@ -16,7 +16,7 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_y.cml b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_y.cml index 7561c7b02f..2ce91bd232 100644 --- a/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_y.cml +++ b/lib/iris/tests/results/analysis/calculus/delta_handmade_wrt_y.cml @@ -3,17 +3,17 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/grad_contrived1.cml b/lib/iris/tests/results/analysis/calculus/grad_contrived1.cml index 0696e1be75..d4fffd150a 100644 --- a/lib/iris/tests/results/analysis/calculus/grad_contrived1.cml +++ 
b/lib/iris/tests/results/analysis/calculus/grad_contrived1.cml @@ -3,28 +3,28 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/grad_contrived2.cml b/lib/iris/tests/results/analysis/calculus/grad_contrived2.cml index ffa976d4a4..7433be8bc2 100644 --- a/lib/iris/tests/results/analysis/calculus/grad_contrived2.cml +++ b/lib/iris/tests/results/analysis/calculus/grad_contrived2.cml @@ -3,32 +3,40 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/grad_contrived_non_spherical1.cml b/lib/iris/tests/results/analysis/calculus/grad_contrived_non_spherical1.cml index 077e3df4ab..c01b94e6db 100644 --- a/lib/iris/tests/results/analysis/calculus/grad_contrived_non_spherical1.cml +++ b/lib/iris/tests/results/analysis/calculus/grad_contrived_non_spherical1.cml @@ -3,24 +3,28 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lat.cml b/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lat.cml index 7cc09660ab..eda0fd2036 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lat.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lat.cml @@ -4,25 +4,27 @@ + -60.75, -56.25, -51.75, -47.25, -42.75, -38.25, + -33.75, -29.25, -24.75, -20.25, -15.75, -11.25, + -6.75, -2.25, 2.25, 6.75, 11.25, 15.75, + 20.25, 24.75, 29.25, 33.75, 38.25, 42.75, + 47.25, 51.75, 56.25, 60.75, 65.25, 69.75, + 74.25, 78.75, 83.25, 87.75, 92.25, 96.75, + 101.25, 105.75, 110.25, 114.75, 119.25, 123.75, + 128.25]" shape="(49,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"> - + diff --git a/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lon.cml b/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lon.cml index ced788b5c6..6e929a2e79 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lon.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade2_wrt_lon.cml @@ -3,26 +3,28 @@ - + + -155.25, -150.75, -146.25, -141.75, -137.25, + -132.75, -128.25, -123.75, -119.25, -114.75, + -110.25, -105.75, -101.25, -96.75, -92.25, + -87.75, -83.25, -78.75, -74.25, -69.75, + -65.25, -60.75, -56.25, -51.75, -47.25, + -42.75, -38.25, -33.75, -29.25, -24.75, + -20.25, -15.75, -11.25, -6.75, -2.25, + 2.25, 6.75, 11.25, 15.75, 20.25, + 24.75, 29.25, 33.75, 38.25]" shape="(49,)" standard_name="longitude" units="Unit('degrees')" value_type="float32"> diff --git a/lib/iris/tests/results/analysis/calculus/handmade_simple_wrt_x.cml b/lib/iris/tests/results/analysis/calculus/handmade_simple_wrt_x.cml index c055a46e59..adbd8c4dac 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade_simple_wrt_x.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade_simple_wrt_x.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/handmade_wrt_lat.cml b/lib/iris/tests/results/analysis/calculus/handmade_wrt_lat.cml index 98612df27b..39db8cb583 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade_wrt_lat.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade_wrt_lat.cml @@ -3,17 +3,17 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/calculus/handmade_wrt_lon.cml b/lib/iris/tests/results/analysis/calculus/handmade_wrt_lon.cml index ceeb537ac6..fb80441bd7 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade_wrt_lon.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade_wrt_lon.cml @@ -3,12 +3,12 @@ - + - + @@ -16,7 +16,7 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/handmade_wrt_x.cml 
b/lib/iris/tests/results/analysis/calculus/handmade_wrt_x.cml index cbe823b2e0..b43273a21f 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade_wrt_x.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade_wrt_x.cml @@ -3,12 +3,12 @@ - + - + @@ -16,7 +16,7 @@ - + diff --git a/lib/iris/tests/results/analysis/calculus/handmade_wrt_y.cml b/lib/iris/tests/results/analysis/calculus/handmade_wrt_y.cml index b0eaa31da8..9698f9ec8d 100644 --- a/lib/iris/tests/results/analysis/calculus/handmade_wrt_y.cml +++ b/lib/iris/tests/results/analysis/calculus/handmade_wrt_y.cml @@ -3,17 +3,17 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/count_bar_2d.cml b/lib/iris/tests/results/analysis/count_bar_2d.cml index 3457187d4e..49d25934a2 100644 --- a/lib/iris/tests/results/analysis/count_bar_2d.cml +++ b/lib/iris/tests/results/analysis/count_bar_2d.cml @@ -3,13 +3,13 @@ - + - + diff --git a/lib/iris/tests/results/analysis/count_foo_1d.cml b/lib/iris/tests/results/analysis/count_foo_1d.cml index 6a76951959..f611029fa2 100644 --- a/lib/iris/tests/results/analysis/count_foo_1d.cml +++ b/lib/iris/tests/results/analysis/count_foo_1d.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/count_foo_2d.cml b/lib/iris/tests/results/analysis/count_foo_2d.cml index af4ee81c3f..9fcac4a5bd 100644 --- a/lib/iris/tests/results/analysis/count_foo_2d.cml +++ b/lib/iris/tests/results/analysis/count_foo_2d.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/count_foo_bar_2d.cml b/lib/iris/tests/results/analysis/count_foo_bar_2d.cml index 47a25bbd84..73ca30312b 100644 --- a/lib/iris/tests/results/analysis/count_foo_bar_2d.cml +++ b/lib/iris/tests/results/analysis/count_foo_bar_2d.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/delta_one_element_explicit.xml b/lib/iris/tests/results/analysis/delta_and_midpoint/delta_one_element_explicit.xml index 494d198e64..41e7d6453a 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/delta_one_element_explicit.xml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/delta_one_element_explicit.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/midpoint_one_element_explicit.xml b/lib/iris/tests/results/analysis/delta_and_midpoint/midpoint_one_element_explicit.xml index 8b68a16b47..a09710eaf3 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/midpoint_one_element_explicit.xml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/midpoint_one_element_explicit.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple1.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple1.cml index b4c123e294..5927c572e4 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple1.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple1.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_delta.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_delta.cml index c81ccfc9e8..a87393f917 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_delta.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_delta.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_midpoint.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_midpoint.cml index f97d74bff8..020cff992d 100644 --- 
a/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_midpoint.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple1_midpoint.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple2.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple2.cml index 50bc4d77c1..c56488a758 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple2.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple2.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_delta.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_delta.cml index a4621734d3..7e965ff4d5 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_delta.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_delta.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_midpoint.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_midpoint.cml index a981e2b79c..c04f72ce52 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_midpoint.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple2_midpoint.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple3.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple3.cml index f088c97634..467e78de62 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple3.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple3.cml @@ -1,5 +1,5 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_delta.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_delta.cml index 74d7546592..3d7e1bc12d 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_delta.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_delta.cml @@ -1,5 +1,5 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_midpoint.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_midpoint.cml index 961a953ea5..b193b0015c 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_midpoint.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple3_midpoint.cml @@ -1,5 +1,5 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple4.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple4.cml index fd4e8ed6bf..c07dcbc18a 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple4.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple4.cml @@ -1,5 +1,5 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_delta.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_delta.cml index dc6b09a87a..d59b173304 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_delta.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_delta.cml @@ -1,4 +1,4 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_midpoint.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_midpoint.cml index e413c214e9..d954504f42 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_midpoint.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple4_midpoint.cml @@ -1,4 +1,4 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple5.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple5.cml index 0aad76ca07..05770d2c52 100644 --- 
a/lib/iris/tests/results/analysis/delta_and_midpoint/simple5.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple5.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_delta.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_delta.cml index 73ee9c9070..aec0ded3f3 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_delta.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_delta.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_midpoint.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_midpoint.cml index 3e93c682ba..591ba00330 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_midpoint.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple5_midpoint.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/delta_and_midpoint/simple6.cml b/lib/iris/tests/results/analysis/delta_and_midpoint/simple6.cml index 6413204d03..fe47685acd 100644 --- a/lib/iris/tests/results/analysis/delta_and_midpoint/simple6.cml +++ b/lib/iris/tests/results/analysis/delta_and_midpoint/simple6.cml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/analysis/division.cml b/lib/iris/tests/results/analysis/division.cml index 762f51ec0a..90fe592390 100644 --- a/lib/iris/tests/results/analysis/division.cml +++ b/lib/iris/tests/results/analysis/division.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/division_by_array.cml b/lib/iris/tests/results/analysis/division_by_array.cml index 14b0b42dd8..b137858af8 100644 --- a/lib/iris/tests/results/analysis/division_by_array.cml +++ b/lib/iris/tests/results/analysis/division_by_array.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/division_by_latitude.cml b/lib/iris/tests/results/analysis/division_by_latitude.cml index 42437d1e36..c05e82f7f3 100644 --- a/lib/iris/tests/results/analysis/division_by_latitude.cml +++ b/lib/iris/tests/results/analysis/division_by_latitude.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/division_by_longitude.cml b/lib/iris/tests/results/analysis/division_by_longitude.cml index 264ce9b793..243b4158af 100644 --- a/lib/iris/tests/results/analysis/division_by_longitude.cml +++ b/lib/iris/tests/results/analysis/division_by_longitude.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/division_by_singular_coord.cml b/lib/iris/tests/results/analysis/division_by_singular_coord.cml index 4c9c58d760..6e91963596 100644 --- a/lib/iris/tests/results/analysis/division_by_singular_coord.cml +++ b/lib/iris/tests/results/analysis/division_by_singular_coord.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/division_scalar.cml b/lib/iris/tests/results/analysis/division_scalar.cml index 14b0b42dd8..b137858af8 100644 --- a/lib/iris/tests/results/analysis/division_scalar.cml +++ b/lib/iris/tests/results/analysis/division_scalar.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/exp.cml b/lib/iris/tests/results/analysis/exp.cml index 357a84363e..120a71e587 100644 --- a/lib/iris/tests/results/analysis/exp.cml +++ b/lib/iris/tests/results/analysis/exp.cml @@ -3,17 +3,17 @@ - + diff --git a/lib/iris/tests/results/analysis/exponentiate.cml b/lib/iris/tests/results/analysis/exponentiate.cml index bb825f6714..066e7c3749 
100644 --- a/lib/iris/tests/results/analysis/exponentiate.cml +++ b/lib/iris/tests/results/analysis/exponentiate.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml b/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml index f027f2d9f8..eb2a08de76 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml b/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml index f027f2d9f8..eb2a08de76 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml b/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml index 1bc809ce63..ca83009959 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml b/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml index 1bc809ce63..ca83009959 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml index cadd1e8b65..16f8ec2d69 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml index cadd1e8b65..16f8ec2d69 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/analysis/gmean_latitude.cml b/lib/iris/tests/results/analysis/gmean_latitude.cml index 26b7fdc8af..ca4a5a39f2 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml b/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml index 94ed36ac88..a31a89ab34 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml index 1db977312b..dd97d15f27 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/hmean_latitude.cml b/lib/iris/tests/results/analysis/hmean_latitude.cml index 70e3fcb540..86af8f99bc 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude.cml 
+++ b/lib/iris/tests/results/analysis/hmean_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml b/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml index f762fd643b..3b469be379 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml index 369dca3203..759e94104a 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml b/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml index 059541e208..cd3d7ac69a 100644 --- a/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml +++ b/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml @@ -3,16 +3,16 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml index 059541e208..cd3d7ac69a 100644 --- a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml +++ b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml @@ -3,16 +3,16 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml index 059541e208..cd3d7ac69a 100644 --- a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml @@ -3,16 +3,16 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/log.cml b/lib/iris/tests/results/analysis/log.cml index c24e071dc5..9a90864c58 100644 --- a/lib/iris/tests/results/analysis/log.cml +++ b/lib/iris/tests/results/analysis/log.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/log10.cml b/lib/iris/tests/results/analysis/log10.cml index abd4065526..226322cb61 100644 --- a/lib/iris/tests/results/analysis/log10.cml +++ b/lib/iris/tests/results/analysis/log10.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/log2.cml b/lib/iris/tests/results/analysis/log2.cml index d121ad9a9d..0c26538dd4 100644 --- a/lib/iris/tests/results/analysis/log2.cml +++ b/lib/iris/tests/results/analysis/log2.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/maths_original.cml b/lib/iris/tests/results/analysis/maths_original.cml index 15fbb5210f..f3f838f1b8 100644 --- a/lib/iris/tests/results/analysis/maths_original.cml +++ b/lib/iris/tests/results/analysis/maths_original.cml @@ -7,36 +7,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/max_latitude.cml b/lib/iris/tests/results/analysis/max_latitude.cml index 89542d27d3..fa00aacec5 100644 --- a/lib/iris/tests/results/analysis/max_latitude.cml +++ b/lib/iris/tests/results/analysis/max_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/max_latitude_longitude.cml b/lib/iris/tests/results/analysis/max_latitude_longitude.cml index 7d24ca7f14..801d4302fa 100644 
--- a/lib/iris/tests/results/analysis/max_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/max_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml index b4d1e0349c..2dc352e208 100644 --- a/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/max_run_bar_2d.cml b/lib/iris/tests/results/analysis/max_run_bar_2d.cml index 32a8a377be..6d56c2220b 100644 --- a/lib/iris/tests/results/analysis/max_run_bar_2d.cml +++ b/lib/iris/tests/results/analysis/max_run_bar_2d.cml @@ -3,13 +3,13 @@ - + - + diff --git a/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml b/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml index 32a8a377be..6d56c2220b 100644 --- a/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml +++ b/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml @@ -3,13 +3,13 @@ - + - + diff --git a/lib/iris/tests/results/analysis/max_run_foo_1d.cml b/lib/iris/tests/results/analysis/max_run_foo_1d.cml index b2a3bcef56..a5f53306db 100644 --- a/lib/iris/tests/results/analysis/max_run_foo_1d.cml +++ b/lib/iris/tests/results/analysis/max_run_foo_1d.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/max_run_foo_2d.cml b/lib/iris/tests/results/analysis/max_run_foo_2d.cml index fb8448136f..45e9836823 100644 --- a/lib/iris/tests/results/analysis/max_run_foo_2d.cml +++ b/lib/iris/tests/results/analysis/max_run_foo_2d.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/mean_latitude.cml b/lib/iris/tests/results/analysis/mean_latitude.cml index 80921e762d..44b26db3fb 100644 --- a/lib/iris/tests/results/analysis/mean_latitude.cml +++ b/lib/iris/tests/results/analysis/mean_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/mean_latitude_longitude.cml b/lib/iris/tests/results/analysis/mean_latitude_longitude.cml index 6ac9400a3a..0991425a9a 100644 --- a/lib/iris/tests/results/analysis/mean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/mean_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml index affcf07c07..1b5ca1e3dc 100644 --- a/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/median_latitude.cml b/lib/iris/tests/results/analysis/median_latitude.cml index bbf3875688..b5439ed225 100644 --- a/lib/iris/tests/results/analysis/median_latitude.cml +++ b/lib/iris/tests/results/analysis/median_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/median_latitude_longitude.cml b/lib/iris/tests/results/analysis/median_latitude_longitude.cml index 5663f6d65f..f8116848a6 100644 --- a/lib/iris/tests/results/analysis/median_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/median_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml 
b/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml index c0c0d7c46b..53fd4ef29d 100644 --- a/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/min_latitude.cml b/lib/iris/tests/results/analysis/min_latitude.cml index bf20be30a9..13e52696f8 100644 --- a/lib/iris/tests/results/analysis/min_latitude.cml +++ b/lib/iris/tests/results/analysis/min_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/min_latitude_longitude.cml b/lib/iris/tests/results/analysis/min_latitude_longitude.cml index 3792645582..78cd58ca93 100644 --- a/lib/iris/tests/results/analysis/min_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/min_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml index b43231b7e6..672cef058a 100644 --- a/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/multiply.cml b/lib/iris/tests/results/analysis/multiply.cml index 8fb8658f5d..0a3c2cfb03 100644 --- a/lib/iris/tests/results/analysis/multiply.cml +++ b/lib/iris/tests/results/analysis/multiply.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/multiply_different_std_name.cml b/lib/iris/tests/results/analysis/multiply_different_std_name.cml index 2d89e5882f..829bbcc582 100644 --- a/lib/iris/tests/results/analysis/multiply_different_std_name.cml +++ b/lib/iris/tests/results/analysis/multiply_different_std_name.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/original.cml b/lib/iris/tests/results/analysis/original.cml index 414de1b6b5..b958136bd1 100644 --- a/lib/iris/tests/results/analysis/original.cml +++ b/lib/iris/tests/results/analysis/original.cml @@ -8,26 +8,26 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/original_common.cml b/lib/iris/tests/results/analysis/original_common.cml index bbfa48d7d8..258ca67c46 100644 --- a/lib/iris/tests/results/analysis/original_common.cml +++ b/lib/iris/tests/results/analysis/original_common.cml @@ -8,26 +8,26 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/original_hmean.cml b/lib/iris/tests/results/analysis/original_hmean.cml index bdc145022c..28cea63268 100644 --- a/lib/iris/tests/results/analysis/original_hmean.cml +++ b/lib/iris/tests/results/analysis/original_hmean.cml @@ -8,26 +8,26 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/proportion_bar_2d.cml b/lib/iris/tests/results/analysis/proportion_bar_2d.cml index 263fcaba9e..f28f4b1546 100644 --- a/lib/iris/tests/results/analysis/proportion_bar_2d.cml +++ b/lib/iris/tests/results/analysis/proportion_bar_2d.cml @@ -3,13 +3,13 @@ - + - + diff --git a/lib/iris/tests/results/analysis/proportion_foo_1d.cml b/lib/iris/tests/results/analysis/proportion_foo_1d.cml index a0bd3c982f..6ebd3e0f39 100644 --- a/lib/iris/tests/results/analysis/proportion_foo_1d.cml +++ b/lib/iris/tests/results/analysis/proportion_foo_1d.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/proportion_foo_2d.cml 
b/lib/iris/tests/results/analysis/proportion_foo_2d.cml index d715499e58..f2c803bb71 100644 --- a/lib/iris/tests/results/analysis/proportion_foo_2d.cml +++ b/lib/iris/tests/results/analysis/proportion_foo_2d.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/proportion_foo_2d_masked.cml b/lib/iris/tests/results/analysis/proportion_foo_2d_masked.cml index 263fcaba9e..f28f4b1546 100644 --- a/lib/iris/tests/results/analysis/proportion_foo_2d_masked.cml +++ b/lib/iris/tests/results/analysis/proportion_foo_2d_masked.cml @@ -3,13 +3,13 @@ - + - + diff --git a/lib/iris/tests/results/analysis/proportion_foo_bar_2d.cml b/lib/iris/tests/results/analysis/proportion_foo_bar_2d.cml index 77123dd86e..9baab831e1 100644 --- a/lib/iris/tests/results/analysis/proportion_foo_bar_2d.cml +++ b/lib/iris/tests/results/analysis/proportion_foo_bar_2d.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_both_circular.cml b/lib/iris/tests/results/analysis/regrid/linear_both_circular.cml index 576ab4ace6..2ee0fc00d9 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_both_circular.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_both_circular.cml @@ -7,27 +7,27 @@ - + - + - + - + @@ -39,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_circular_grid.cml b/lib/iris/tests/results/analysis/regrid/linear_circular_grid.cml index d8fd78a749..3544db9698 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_circular_grid.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_circular_grid.cml @@ -7,23 +7,27 @@ - + - + - + - + @@ -35,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_circular_src.cml b/lib/iris/tests/results/analysis/regrid/linear_circular_src.cml index 1032b4fc6e..296de665da 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_circular_src.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_circular_src.cml @@ -7,27 +7,27 @@ - + - + - + - + @@ -39,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_circular_srcmissingmask.cml b/lib/iris/tests/results/analysis/regrid/linear_circular_srcmissingmask.cml index 1032b4fc6e..296de665da 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_circular_srcmissingmask.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_circular_srcmissingmask.cml @@ -7,27 +7,27 @@ - + - + - + - + @@ -39,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml b/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml index 1ac69490b4..b719738a62 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml @@ -6,85 +6,94 @@ - + [[424.42307, 398.04324, nan, nan, + nan], + [368.6881 , 343.87836, nan, nan, + nan], + [375.09146, 347.86066, nan, nan, + nan], + [446.16125, 414.22037, nan, nan, + nan]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_non_circular.cml b/lib/iris/tests/results/analysis/regrid/linear_non_circular.cml index 064409dde5..bb678502c1 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_non_circular.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_non_circular.cml @@ -7,23 +7,27 @@ - + - + - + - + @@ -35,24 +39,24 @@ - + - + - + - + diff --git 
a/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml b/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml index eb9adb4aef..fc39fee0f5 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml @@ -6,77 +6,78 @@ - + [[ nan, nan, 367.72552, 355.62955], + [ nan, nan, 340.44327, 407.57434], + [ nan, nan, 336.60175, 419.0933 ], + [ nan, nan, 376.38995, 341.02115]]]" shape="(2, 4, 4)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -88,18 +89,19 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset.cml b/lib/iris/tests/results/analysis/regrid/linear_subset.cml index 9bd62287fe..0121d84ebf 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset.cml @@ -6,85 +6,94 @@ - + [[424.42307, 398.04324, 305.16385, 254.07837, + 340.82806], + [368.6881 , 343.87836, 348.51068, 368.9184 , + 407.57434], + [375.09146, 347.86066, 370.53574, 395.5417 , + 397.02896], + [446.16125, 414.22037, 365.36652, 322.28683, + 296.69153]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_anon.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_anon.cml index 1945b03a1a..ea3a804166 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_anon.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_anon.cml @@ -6,85 +6,94 @@ - + [[424.42307, 398.04324, 305.16385, 254.07837, + 340.82806], + [368.6881 , 343.87836, 348.51068, 368.9184 , + 407.57434], + [375.09146, 347.86066, 370.53574, 395.5417 , + 397.02896], + [446.16125, 414.22037, 365.36652, 322.28683, + 296.69153]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,14 +105,18 @@ - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml index 9bd62287fe..0121d84ebf 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml @@ -6,85 +6,94 @@ - + [[424.42307, 398.04324, 305.16385, 254.07837, + 340.82806], + [368.6881 , 343.87836, 348.51068, 368.9184 , + 407.57434], + [375.09146, 347.86066, 370.53574, 395.5417 , + 397.02896], + [446.16125, 414.22037, 365.36652, 322.28683, + 296.69153]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml index 9bd62287fe..0121d84ebf 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml @@ -6,85 +6,94 @@ - + [[424.42307, 398.04324, 305.16385, 254.07837, + 340.82806], + [368.6881 , 343.87836, 348.51068, 368.9184 , + 407.57434], + [375.09146, 347.86066, 370.53574, 395.5417 , + 397.02896], + [446.16125, 414.22037, 365.36652, 322.28683, + 296.69153]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_both_circular.cml 
b/lib/iris/tests/results/analysis/regrid/nearest_both_circular.cml index d8f1a9d0f6..9352ae6076 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_both_circular.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_both_circular.cml @@ -7,27 +7,27 @@ - + - + - + - + @@ -39,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_circular_grid.cml b/lib/iris/tests/results/analysis/regrid/nearest_circular_grid.cml index 16863839a1..c13e7872a2 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_circular_grid.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_circular_grid.cml @@ -7,23 +7,27 @@ - + - + - + - + @@ -35,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_circular_src.cml b/lib/iris/tests/results/analysis/regrid/nearest_circular_src.cml index 5eb032cf2c..400efcd7fa 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_circular_src.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_circular_src.cml @@ -7,27 +7,27 @@ - + - + - + - + @@ -39,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_circular_srcmissingmask.cml b/lib/iris/tests/results/analysis/regrid/nearest_circular_srcmissingmask.cml index 5eb032cf2c..400efcd7fa 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_circular_srcmissingmask.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_circular_srcmissingmask.cml @@ -7,27 +7,27 @@ - + - + - + - + @@ -39,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml b/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml index a1cff2363e..905109b6b7 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml @@ -6,85 +6,94 @@ - + [[434.5705 , 395.5391 , 219.27228, 219.27228, + 349.64597], + [345.97134, 310.52786, nan, nan, + 444.776 ], + [345.97134, 310.52786, nan, nan, + 444.776 ], + [461.227 , 414.88275, 323.68027, 323.68027, + 280.81027]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_non_circular.cml b/lib/iris/tests/results/analysis/regrid/nearest_non_circular.cml index da162648be..6978ec7200 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_non_circular.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_non_circular.cml @@ -7,23 +7,27 @@ - + - + - + - + @@ -35,24 +39,24 @@ - + - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml b/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml index 98a0b6b805..a769ed4a38 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml @@ -6,77 +6,78 @@ - + [[ nan, nan, 395.5391 , 349.64597], + [ nan, nan, 310.52786, 444.776 ], + [ nan, nan, 310.52786, 444.776 ], + [ nan, nan, 414.88275, 280.81027]]]" shape="(2, 4, 4)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -88,18 +89,19 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset.cml index a704cbecbb..6d7ef1b453 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset.cml @@ 
-6,85 +6,94 @@ - + [[434.5705 , 395.5391 , 219.27228, 219.27228, + 349.64597], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [461.227 , 414.88275, 323.68027, 323.68027, + 280.81027]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_anon.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_anon.cml index 40390f387c..c40a3475a3 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_anon.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_anon.cml @@ -6,85 +6,94 @@ - + [[434.5705 , 395.5391 , 219.27228, 219.27228, + 349.64597], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [461.227 , 414.88275, 323.68027, 323.68027, + 280.81027]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,14 +105,18 @@ - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml index a704cbecbb..6d7ef1b453 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml @@ -6,85 +6,94 @@ - + [[434.5705 , 395.5391 , 219.27228, 219.27228, + 349.64597], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [461.227 , 414.88275, 323.68027, 323.68027, + 280.81027]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml index a704cbecbb..6d7ef1b453 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml @@ -6,85 +6,94 @@ - + [[434.5705 , 395.5391 , 219.27228, 219.27228, + 349.64597], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [345.97134, 310.52786, 425.15723, 425.15723, + 444.776 ], + [461.227 , 414.88275, 323.68027, 323.68027, + 280.81027]]]" shape="(2, 4, 5)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -96,18 +105,23 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/regrid/no_overlap.cml b/lib/iris/tests/results/analysis/regrid/no_overlap.cml index da2f03f1ee..19033c255d 100644 --- a/lib/iris/tests/results/analysis/regrid/no_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/no_overlap.cml @@ -7,76 +7,78 @@ + [[nan, nan, nan, nan], + [nan, nan, nan, nan], + [nan, nan, nan, nan], + [nan, nan, nan, nan]]]" shape="(2, 4, 4)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -88,18 +90,19 @@ - + + [nan, nan, nan, nan], + [nan, nan, nan, nan], + [nan, nan, nan, nan]]" shape="(4, 4)" standard_name="surface_altitude" units="Unit('m')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/analysis/rms_latitude.cml b/lib/iris/tests/results/analysis/rms_latitude.cml index d4b1428fb2..e3b82802ca 100644 --- a/lib/iris/tests/results/analysis/rms_latitude.cml +++ b/lib/iris/tests/results/analysis/rms_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git 
a/lib/iris/tests/results/analysis/rms_latitude_longitude.cml b/lib/iris/tests/results/analysis/rms_latitude_longitude.cml index 4293087847..d0c7c95535 100644 --- a/lib/iris/tests/results/analysis/rms_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/rms_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml index 9ca1d23b42..887b8b6ebb 100644 --- a/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/rms_weighted_2d.cml b/lib/iris/tests/results/analysis/rms_weighted_2d.cml index 433e27d359..b315bd0983 100644 --- a/lib/iris/tests/results/analysis/rms_weighted_2d.cml +++ b/lib/iris/tests/results/analysis/rms_weighted_2d.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/rolling_window/simple_latitude.cml b/lib/iris/tests/results/analysis/rolling_window/simple_latitude.cml index ff64076f83..2eb8d59561 100644 --- a/lib/iris/tests/results/analysis/rolling_window/simple_latitude.cml +++ b/lib/iris/tests/results/analysis/rolling_window/simple_latitude.cml @@ -3,11 +3,11 @@ - + - + diff --git a/lib/iris/tests/results/analysis/rolling_window/simple_longitude.cml b/lib/iris/tests/results/analysis/rolling_window/simple_longitude.cml index b2c422057e..7979ae25b6 100644 --- a/lib/iris/tests/results/analysis/rolling_window/simple_longitude.cml +++ b/lib/iris/tests/results/analysis/rolling_window/simple_longitude.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/analysis/rolling_window/size_4_longitude.cml b/lib/iris/tests/results/analysis/rolling_window/size_4_longitude.cml index 0e4330ce82..6c19e04f6f 100644 --- a/lib/iris/tests/results/analysis/rolling_window/size_4_longitude.cml +++ b/lib/iris/tests/results/analysis/rolling_window/size_4_longitude.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/analysis/sqrt.cml b/lib/iris/tests/results/analysis/sqrt.cml index f8a1c48fc3..6bdeaee3e9 100644 --- a/lib/iris/tests/results/analysis/sqrt.cml +++ b/lib/iris/tests/results/analysis/sqrt.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/std_dev_latitude.cml b/lib/iris/tests/results/analysis/std_dev_latitude.cml index a45aefeff4..fec9f9d09c 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml b/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml index 95e8e3694d..86d60a29ad 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml index f91f6005b7..26baf44a65 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/subtract.cml b/lib/iris/tests/results/analysis/subtract.cml index 
3466578756..d8a56d895b 100644 --- a/lib/iris/tests/results/analysis/subtract.cml +++ b/lib/iris/tests/results/analysis/subtract.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/subtract_array.cml b/lib/iris/tests/results/analysis/subtract_array.cml index 14b0b42dd8..b137858af8 100644 --- a/lib/iris/tests/results/analysis/subtract_array.cml +++ b/lib/iris/tests/results/analysis/subtract_array.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/subtract_coord_x.cml b/lib/iris/tests/results/analysis/subtract_coord_x.cml index 060814c6ba..ae951e328f 100644 --- a/lib/iris/tests/results/analysis/subtract_coord_x.cml +++ b/lib/iris/tests/results/analysis/subtract_coord_x.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/subtract_coord_y.cml b/lib/iris/tests/results/analysis/subtract_coord_y.cml index 4a9351cf6f..0aaf05808c 100644 --- a/lib/iris/tests/results/analysis/subtract_coord_y.cml +++ b/lib/iris/tests/results/analysis/subtract_coord_y.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/subtract_scalar.cml b/lib/iris/tests/results/analysis/subtract_scalar.cml index f458364143..889cde24bd 100644 --- a/lib/iris/tests/results/analysis/subtract_scalar.cml +++ b/lib/iris/tests/results/analysis/subtract_scalar.cml @@ -6,36 +6,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/sum_latitude.cml b/lib/iris/tests/results/analysis/sum_latitude.cml index fbb8460fd8..bef5f48f72 100644 --- a/lib/iris/tests/results/analysis/sum_latitude.cml +++ b/lib/iris/tests/results/analysis/sum_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/sum_latitude_longitude.cml b/lib/iris/tests/results/analysis/sum_latitude_longitude.cml index cb992f3b9d..f5ce9b622c 100644 --- a/lib/iris/tests/results/analysis/sum_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/sum_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml index 6171dc516b..3dca019667 100644 --- a/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/sum_weighted_1d.cml b/lib/iris/tests/results/analysis/sum_weighted_1d.cml index 09958e4eb0..3579d60d1a 100644 --- a/lib/iris/tests/results/analysis/sum_weighted_1d.cml +++ b/lib/iris/tests/results/analysis/sum_weighted_1d.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/sum_weighted_2d.cml b/lib/iris/tests/results/analysis/sum_weighted_2d.cml index 57cf7d3d1f..4b8b04b1aa 100644 --- a/lib/iris/tests/results/analysis/sum_weighted_2d.cml +++ b/lib/iris/tests/results/analysis/sum_weighted_2d.cml @@ -3,13 +3,13 @@ - + - + diff --git a/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml b/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml index 038e7c8668..78d56bc4ec 100644 --- a/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml +++ b/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml b/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml index 038e7c8668..78d56bc4ec 100644 
--- a/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/analysis/variance_latitude.cml b/lib/iris/tests/results/analysis/variance_latitude.cml index 5b55731396..1efa3dc26c 100644 --- a/lib/iris/tests/results/analysis/variance_latitude.cml +++ b/lib/iris/tests/results/analysis/variance_latitude.cml @@ -8,26 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/variance_latitude_longitude.cml b/lib/iris/tests/results/analysis/variance_latitude_longitude.cml index 359e40ef8a..9fbd2bac53 100644 --- a/lib/iris/tests/results/analysis/variance_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/variance_latitude_longitude.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml index 0345eac77b..53484137ca 100644 --- a/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml @@ -8,13 +8,13 @@ - + - + - + @@ -24,9 +24,8 @@ - + diff --git a/lib/iris/tests/results/analysis/weighted_mean_lat.cml b/lib/iris/tests/results/analysis/weighted_mean_lat.cml index d2bb6f0df4..7786112b9c 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_lat.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_lat.cml @@ -3,15 +3,15 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/weighted_mean_latlon.cml b/lib/iris/tests/results/analysis/weighted_mean_latlon.cml index e25e74c021..c7addc162a 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_latlon.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_latlon.cml @@ -3,15 +3,15 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/weighted_mean_lon.cml b/lib/iris/tests/results/analysis/weighted_mean_lon.cml index 6ce89976b6..2fc50bf6d4 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_lon.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_lon.cml @@ -3,15 +3,15 @@ - + - + - + diff --git a/lib/iris/tests/results/analysis/weighted_mean_original.cml b/lib/iris/tests/results/analysis/weighted_mean_original.cml index a69e633e26..a013add0cb 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_original.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_original.cml @@ -7,37 +7,63 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/analysis/weighted_mean_source.cml b/lib/iris/tests/results/analysis/weighted_mean_source.cml index eb72035a4f..9ea20dfe46 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_source.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_source.cml @@ -3,15 +3,15 @@ - + - + - + diff --git a/lib/iris/tests/results/categorisation/customcheck.cml b/lib/iris/tests/results/categorisation/customcheck.cml index 476a1c56ef..7fd65b6965 100644 --- a/lib/iris/tests/results/categorisation/customcheck.cml +++ b/lib/iris/tests/results/categorisation/customcheck.cml @@ -4,22 +4,23 @@ + 0, 1, 1, 1, 1, 1, 2]" shape="(23,)" units="Unit('1')" value_type="int64"/> + 1970, 1970, 1970, 1970, 1970, 1971, 1971, 1971, + 1971, 1971, 1971, 1971, 1971, 1971, 1971]" shape="(23,)" units="Unit('1')" value_type="int64"/> - + - + diff --git a/lib/iris/tests/results/categorisation/quickcheck.cml b/lib/iris/tests/results/categorisation/quickcheck.cml index b8f3904ad1..58a8fafa5c 100644 --- 
a/lib/iris/tests/results/categorisation/quickcheck.cml +++ b/lib/iris/tests/results/categorisation/quickcheck.cml @@ -3,72 +3,77 @@ - + - + - + - + + 2, 0, 1, 1, 2, 0, 1]" shape="(23,)" units="Unit('1')" value_type="int64"/> - + - + + 1, 1, 1, 1, 2, 2, 2]" shape="(23,)" units="Unit('1')" value_type="int64"/> + 1970, 1970, 1970, 1970, 1970, 1971, 1971, 1971, + 1971, 1971, 1971, 1971, 1971, 1971, 1971]" shape="(23,)" units="Unit('1')" value_type="int64"/> - + - + + 1, 0, 6, 5, 4, 3, 2]" shape="(23,)" units="Unit('1')" value_type="int64"/> + 1970, 1970, 1970, 1970, 1970, 1970, 1971, 1971, + 1971, 1971, 1971, 1971, 1971, 1971, 1971]" shape="(23,)" units="Unit('1')" value_type="int64"/> - + diff --git a/lib/iris/tests/results/cdm/extract/lat_eq_10.cml b/lib/iris/tests/results/cdm/extract/lat_eq_10.cml index e7213fc7bd..f6052ccb93 100644 --- a/lib/iris/tests/results/cdm/extract/lat_eq_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_eq_10.cml @@ -8,129 +8,139 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/cdm/extract/lat_gt_10.cml b/lib/iris/tests/results/cdm/extract/lat_gt_10.cml index 3ffbbf89e5..c06345ab33 100644 --- a/lib/iris/tests/results/cdm/extract/lat_gt_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_gt_10.cml @@ -8,138 +8,148 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml b/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml index 7091aee748..b9f2a4b496 100644 --- a/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml @@ -8,139 +8,148 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/cdm/masked_cube.cml b/lib/iris/tests/results/cdm/masked_cube.cml index dcfa8c062f..64663a55fe 100644 --- a/lib/iris/tests/results/cdm/masked_cube.cml +++ b/lib/iris/tests/results/cdm/masked_cube.cml @@ -7,32 +7,30 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt b/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt index ffb6a62daf..fa1cd1c04c 100644 --- a/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt +++ b/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt @@ -8,10 +8,10 @@ air_temperature / (K) (latitude: 73; longitude: 96) pressure 1000.0 hPa time 1998-12-01 00:00:00 Cell methods: - mean longitude (6 minutes, This is a test comment), latitude (12 minutes) - average longitude (6 minutes, This is another test comment), latitude (15 minutes, This is another comment) - average longitude, latitude - percentile longitude (6 minutes, This is another test comment) + 0 longitude: latitude: mean (interval: 6 minutes interval: 12 minutes comment: This is a test comment) + 1 longitude: latitude: average (interval: 6 minutes interval: 15 minutes comment: This is another test comment comment: This is another comment) + 2 longitude: latitude: average + 3 longitude: percentile (interval: 6 minutes comment: This is another test comment) Attributes: STASH m01s16i203 source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/test_simple_cube_intersection.cml b/lib/iris/tests/results/cdm/test_simple_cube_intersection.cml index 8d1b986397..c4d2c6dd81 100644 --- a/lib/iris/tests/results/cdm/test_simple_cube_intersection.cml +++ b/lib/iris/tests/results/cdm/test_simple_cube_intersection.cml @@ -3,12 +3,12 @@ - + - + @@ -22,12 +22,12 @@ - + - + diff --git 
a/lib/iris/tests/results/concatenate/concat_2x2d.cml b/lib/iris/tests/results/concatenate/concat_2x2d.cml index feeb553642..cd4fd537ff 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d.cml @@ -3,16 +3,16 @@ - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x.cml index 9076ae2538..894f4df52c 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x.cml @@ -3,19 +3,19 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_bounds.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_bounds.cml index 5597a876b2..07e66e82b6 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_bounds.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_bounds.cml @@ -3,24 +3,24 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_xy.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_xy.cml index 4c5c993b9e..37330ba58b 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_xy.cml @@ -3,23 +3,23 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y.cml index 2ace2a8024..51326ca74b 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y.cml @@ -3,22 +3,22 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y_xy.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y_xy.cml index e0f1fd2775..fa5b41299a 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_x_y_xy.cml @@ -3,26 +3,26 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy.cml index 5bc3c707f7..fac46bb54d 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy.cml @@ -3,20 +3,20 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy_bounds.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy_bounds.cml index 4f279cef01..d947bb394d 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy_bounds.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_xy_bounds.cml @@ -3,26 +3,26 @@ - + - + [[201, 202, 203, 204], + [301, 302, 303, 304], + [202, 203, 204, 205], + [302, 303, 304, 305]]]" id="af0ab254" long_name="xy-aux" points="[[ 1., 101., 2., 102.], + [201., 301., 202., 302.]]" shape="(2, 4)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_y.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_y.cml index 95575d1b65..4d66e2e2d5 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_y.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_y.cml @@ -3,19 +3,19 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2x2d_aux_y_xy.cml b/lib/iris/tests/results/concatenate/concat_2x2d_aux_y_xy.cml index dbe28f6a65..e55016f80a 100644 --- a/lib/iris/tests/results/concatenate/concat_2x2d_aux_y_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2x2d_aux_y_xy.cml @@ -3,23 +3,23 @@ - + - + - + - + 
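Aside (not part of the patch): the cell_methods.__str__.txt hunk above records the reworked cube-summary layout for cell methods — each entry is now listed by index in the CF-style "coord: method (interval: ... comment: ...)" form. The following is a minimal sketch of how such a summary is produced, assuming a recent Iris; Cube, CellMethod and add_cell_method are the public API, but the exact printed layout may differ between versions and should be checked against the "+" lines in that hunk.

# Illustrative sketch only: reproduce the new indexed cell-method summary.
import numpy as np
from iris.cube import Cube
from iris.coords import CellMethod

cube = Cube(np.zeros((3, 4), dtype=np.float32),
            standard_name="air_temperature", units="K")
# First entry mirrors the test fixture: mean over longitude/latitude with
# per-coordinate intervals and a comment.
cube.add_cell_method(
    CellMethod("mean",
               coords=["longitude", "latitude"],
               intervals=["6 minutes", "12 minutes"],
               comments="This is a test comment"))
# Second entry has no intervals or comments.
cube.add_cell_method(CellMethod("average", coords=["longitude", "latitude"]))

# print(cube) renders the "Cell methods:" block one entry per line,
# indexed from 0, in the style shown by the expected-output hunk, e.g.
#   0  longitude: latitude: mean (interval: 6 minutes interval: 12 minutes
#      comment: This is a test comment)
#   1  longitude: latitude: average
print(cube)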
diff --git a/lib/iris/tests/results/concatenate/concat_2y2d.cml b/lib/iris/tests/results/concatenate/concat_2y2d.cml index 55a896c12a..bdf2c04c91 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d.cml @@ -3,16 +3,16 @@ - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x.cml index 6e8e367501..55d9978911 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x.cml @@ -3,19 +3,19 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_xy.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_xy.cml index 20ce15e486..cdfed95f4e 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_xy.cml @@ -3,27 +3,27 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y.cml index f486652592..91bbdc381e 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y.cml @@ -3,22 +3,22 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y_xy.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y_xy.cml index cc1377cfd0..6e747200da 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_x_y_xy.cml @@ -3,30 +3,30 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_xy.cml index 4e4a8d8729..ab85674486 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_xy.cml @@ -3,24 +3,24 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_y.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_y.cml index 73a11c74a8..40b1d6bbe9 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_y.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_y.cml @@ -3,19 +3,19 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_2y2d_aux_y_xy.cml b/lib/iris/tests/results/concatenate/concat_2y2d_aux_y_xy.cml index 8add7084dc..17af2b0653 100644 --- a/lib/iris/tests/results/concatenate/concat_2y2d_aux_y_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_2y2d_aux_y_xy.cml @@ -3,27 +3,27 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_3d_simple.cml b/lib/iris/tests/results/concatenate/concat_3d_simple.cml index 67c7cc4376..9afb0fd9dd 100644 --- a/lib/iris/tests/results/concatenate/concat_3d_simple.cml +++ b/lib/iris/tests/results/concatenate/concat_3d_simple.cml @@ -3,70 +3,70 @@ - + - + - + - + [[4000., 5000., 4000., 5000.], + [6000., 7000., 6000., 7000.], + [4000., 5000., 4000., 5000.], + [6000., 7000., 6000., 7000.]]]" shape="(4, 4, 4)" units="Unit('1')" value_type="float32"/> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_4mix2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_4mix2d_aux_xy.cml index b706f7b3cb..e53113e840 100644 --- a/lib/iris/tests/results/concatenate/concat_4mix2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_4mix2d_aux_xy.cml @@ -3,22 +3,22 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_4x2d_aux_xy.cml 
b/lib/iris/tests/results/concatenate/concat_4x2d_aux_xy.cml index 229281f88c..67bba81710 100644 --- a/lib/iris/tests/results/concatenate/concat_4x2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_4x2d_aux_xy.cml @@ -3,22 +3,22 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_4y2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_4y2d_aux_xy.cml index bf9ee0a610..9efc2ab088 100644 --- a/lib/iris/tests/results/concatenate/concat_4y2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_4y2d_aux_xy.cml @@ -3,22 +3,22 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_9mix2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_9mix2d_aux_xy.cml index 636d7ad06d..c29783bca6 100644 --- a/lib/iris/tests/results/concatenate/concat_9mix2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_9mix2d_aux_xy.cml @@ -3,28 +3,28 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_9x2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_9x2d_aux_xy.cml index dea24c5518..1fd9d843c5 100644 --- a/lib/iris/tests/results/concatenate/concat_9x2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_9x2d_aux_xy.cml @@ -3,28 +3,28 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_9y2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_9y2d_aux_xy.cml index ed4b23ce08..1d62fae473 100644 --- a/lib/iris/tests/results/concatenate/concat_9y2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_9y2d_aux_xy.cml @@ -3,28 +3,28 @@ - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_anonymous.cml b/lib/iris/tests/results/concatenate/concat_anonymous.cml index 7eeccb2241..c5f986cdcd 100644 --- a/lib/iris/tests/results/concatenate/concat_anonymous.cml +++ b/lib/iris/tests/results/concatenate/concat_anonymous.cml @@ -3,14 +3,14 @@ - + - + @@ -19,8 +19,8 @@ - + diff --git a/lib/iris/tests/results/concatenate/concat_masked_2x2d.cml b/lib/iris/tests/results/concatenate/concat_masked_2x2d.cml index f8b47f9627..6b25ac8259 100644 --- a/lib/iris/tests/results/concatenate/concat_masked_2x2d.cml +++ b/lib/iris/tests/results/concatenate/concat_masked_2x2d.cml @@ -3,14 +3,14 @@ - + - + diff --git a/lib/iris/tests/results/concatenate/concat_masked_2y2d.cml b/lib/iris/tests/results/concatenate/concat_masked_2y2d.cml index d4f31c7e44..86460bc8a9 100644 --- a/lib/iris/tests/results/concatenate/concat_masked_2y2d.cml +++ b/lib/iris/tests/results/concatenate/concat_masked_2y2d.cml @@ -3,14 +3,14 @@ - + - + diff --git a/lib/iris/tests/results/concatenate/concat_merged_scalar_4mix2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_merged_scalar_4mix2d_aux_xy.cml index 645d0aa95f..77143dc05a 100644 --- a/lib/iris/tests/results/concatenate/concat_merged_scalar_4mix2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_merged_scalar_4mix2d_aux_xy.cml @@ -3,25 +3,25 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_merged_scalar_4x2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_merged_scalar_4x2d_aux_xy.cml index 645d0aa95f..77143dc05a 100644 --- a/lib/iris/tests/results/concatenate/concat_merged_scalar_4x2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_merged_scalar_4x2d_aux_xy.cml @@ -3,25 +3,25 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_merged_scalar_4y2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_merged_scalar_4y2d_aux_xy.cml index 94bcb31795..8f72b0339e 100644 --- 
a/lib/iris/tests/results/concatenate/concat_merged_scalar_4y2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_merged_scalar_4y2d_aux_xy.cml @@ -3,25 +3,25 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4mix2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4mix2d_aux_xy.cml index 1b60930a09..3c078ffbcc 100644 --- a/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4mix2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4mix2d_aux_xy.cml @@ -3,19 +3,19 @@ - + - + - + - + @@ -24,19 +24,19 @@ - + + [2.5, 3.5]]" id="78a0dfe8" long_name="x" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + + [2.5, 3.5]]" id="6fdbcbab" long_name="y" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> @@ -45,19 +45,19 @@ - + + [2.5, 3.5]]" id="78a0dfe8" long_name="x" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + - + @@ -66,19 +66,19 @@ - + - + - + + [2.5, 3.5]]" id="6fdbcbab" long_name="y" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> diff --git a/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4x2_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4x2_aux_xy.cml index 1f87f5b3cf..db474a8d40 100644 --- a/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4x2_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4x2_aux_xy.cml @@ -3,19 +3,19 @@ - + - + - + - + @@ -24,19 +24,19 @@ - + + [2.5, 3.5]]" id="78a0dfe8" long_name="x" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + - + @@ -45,19 +45,19 @@ - + - + - + + [2.5, 3.5]]" id="6fdbcbab" long_name="y" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> @@ -66,19 +66,19 @@ - + + [2.5, 3.5]]" id="78a0dfe8" long_name="x" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + + [2.5, 3.5]]" id="6fdbcbab" long_name="y" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> diff --git a/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4y2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4y2d_aux_xy.cml index cca6094d9c..7cef64ff1e 100644 --- a/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4y2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_pre_merged_scalar_4y2d_aux_xy.cml @@ -3,19 +3,19 @@ - + - + - + - + @@ -24,19 +24,19 @@ - + - + - + + [2.5, 3.5]]" id="6fdbcbab" long_name="y" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> @@ -45,19 +45,19 @@ - + + [2.5, 3.5]]" id="78a0dfe8" long_name="x" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + - + @@ -66,19 +66,19 @@ - + + [2.5, 3.5]]" id="78a0dfe8" long_name="x" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + + [2.5, 3.5]]" id="6fdbcbab" long_name="y" points="[2., 3.]" shape="(2,)" units="Unit('1')" value_type="float32"/> diff --git a/lib/iris/tests/results/concatenate/concat_scalar_4mix2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_scalar_4mix2d_aux_xy.cml index 8709ebd03d..6f2924b86a 100644 --- a/lib/iris/tests/results/concatenate/concat_scalar_4mix2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_scalar_4mix2d_aux_xy.cml @@ -3,25 +3,25 @@ - + - + - + - + @@ -30,25 +30,25 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_scalar_4x2d_aux_xy.cml 
b/lib/iris/tests/results/concatenate/concat_scalar_4x2d_aux_xy.cml index 8709ebd03d..6f2924b86a 100644 --- a/lib/iris/tests/results/concatenate/concat_scalar_4x2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_scalar_4x2d_aux_xy.cml @@ -3,25 +3,25 @@ - + - + - + - + @@ -30,25 +30,25 @@ - + - + - + - + diff --git a/lib/iris/tests/results/concatenate/concat_scalar_4y2d_aux_xy.cml b/lib/iris/tests/results/concatenate/concat_scalar_4y2d_aux_xy.cml index 864e476e97..d5b2573933 100644 --- a/lib/iris/tests/results/concatenate/concat_scalar_4y2d_aux_xy.cml +++ b/lib/iris/tests/results/concatenate/concat_scalar_4y2d_aux_xy.cml @@ -3,25 +3,25 @@ - + - + - + - + @@ -30,25 +30,25 @@ - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/all_10_load_match.cml b/lib/iris/tests/results/constrained_load/all_10_load_match.cml index 0712af20fa..7be771967b 100644 --- a/lib/iris/tests/results/constrained_load/all_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/all_10_load_match.cml @@ -8,25 +8,27 @@ - + - + - + - + - + @@ -38,10 +40,10 @@ - + - + @@ -59,25 +61,27 @@ - + - + - + - + - + @@ -89,10 +93,10 @@ - + - + @@ -110,26 +114,27 @@ - + - + - + - + - + @@ -141,10 +146,10 @@ - + - + @@ -162,26 +167,27 @@ - + - + - + - + - + @@ -193,10 +199,10 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml b/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml index 20971021ac..44f796f630 100644 --- a/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml +++ b/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml @@ -8,26 +8,28 @@ - + - + - + - + - + @@ -39,11 +41,11 @@ - + - + @@ -61,26 +63,28 @@ - + - + - + - + - + @@ -92,11 +96,11 @@ - + - + @@ -114,27 +118,28 @@ - + - + - + - + - + @@ -146,11 +151,11 @@ - + - + @@ -168,27 +173,28 @@ - + - + - + - + - + @@ -200,11 +206,11 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/attribute_constraint.cml b/lib/iris/tests/results/constrained_load/attribute_constraint.cml index 664dc943bc..53529dc684 100644 --- a/lib/iris/tests/results/constrained_load/attribute_constraint.cml +++ b/lib/iris/tests/results/constrained_load/attribute_constraint.cml @@ -9,129 +9,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml index 44e7d077df..2440c89883 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml @@ -8,25 +8,27 @@ - + - + - + - + - + @@ -38,10 +40,10 @@ - + - + @@ -59,63 +61,67 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml index 44e7d077df..2440c89883 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml @@ -8,25 +8,27 @@ - + - + - + - + - + @@ -38,10 +40,10 @@ - + - + @@ -59,63 +61,67 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_load_match.cml b/lib/iris/tests/results/constrained_load/theta_10_load_match.cml index e2852d0151..4aee6bb065 
100644 --- a/lib/iris/tests/results/constrained_load/theta_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_load_match.cml @@ -8,25 +8,27 @@ - + - + - + - + - + @@ -38,10 +40,10 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml index e2852d0151..4aee6bb065 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml @@ -8,25 +8,27 @@ - + - + - + - + - + @@ -38,10 +40,10 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml b/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml index 772929b0da..02bee172aa 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + @@ -148,25 +159,27 @@ - + - + - + - + - + @@ -178,10 +191,10 @@ - + - + @@ -199,25 +212,27 @@ - + - + - + - + - + @@ -229,10 +244,10 @@ - + - + @@ -250,26 +265,27 @@ - + - + - + - + - + @@ -281,10 +297,10 @@ - + - + @@ -302,26 +318,27 @@ - + - + - + - + - + @@ -333,10 +350,10 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml index 0e23de090c..5b677ef97c 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + @@ -148,25 +159,27 @@ - + - + - + - + - + @@ -178,10 +191,10 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml index a175652c30..7f315fe394 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + @@ -148,129 +159,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml index a175652c30..7f315fe394 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + @@ -148,129 +159,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml index 0048a742a6..ba900827e6 100644 --- a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml @@ -8,63 +8,67 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml index 0048a742a6..ba900827e6 100644 --- a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml @@ -8,63 +8,67 @@ - + - + - + - + - + - + - + - + diff --git 
a/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml b/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml index e24937854d..10df942a27 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml @@ -8,129 +8,139 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml index e24937854d..10df942a27 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml @@ -8,129 +8,139 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml index 218bdd6b1c..1a4287f572 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml @@ -8,134 +8,144 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml index 218bdd6b1c..1a4287f572 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml @@ -8,134 +8,144 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_load_match.cml b/lib/iris/tests/results/constrained_load/theta_load_match.cml index 0e5b02be51..b9bf8ce411 100644 --- a/lib/iris/tests/results/constrained_load/theta_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_load_match.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_load_strict.cml index 0e5b02be51..b9bf8ce411 100644 --- a/lib/iris/tests/results/constrained_load/theta_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_load_strict.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/coord_api/complex.xml b/lib/iris/tests/results/coord_api/complex.xml index 41adb2cb8d..b36307d19d 100644 --- a/lib/iris/tests/results/coord_api/complex.xml +++ b/lib/iris/tests/results/coord_api/complex.xml @@ -1,8 +1,8 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/add_float_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/add_float_expl.xml index 7eaf699f2d..e8d325ba71 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/add_float_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/add_float_expl.xml @@ -1,24 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/add_simple_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/add_simple_expl.xml index 9bedff935b..ab1f6cfd0c 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/add_simple_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/add_simple_expl.xml @@ -1,23 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/divide_simple_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/divide_simple_expl.xml index 28c01a0f00..e09f1102cb 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/divide_simple_expl.xml +++ 
b/lib/iris/tests/results/coord_api/coord_maths/divide_simple_expl.xml @@ -1,23 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/mult_float_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/mult_float_expl.xml index 77107b9906..6d72a98323 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/mult_float_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/mult_float_expl.xml @@ -1,24 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/multiply_simple_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/multiply_simple_expl.xml index 27f98a31ff..a3f930834e 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/multiply_simple_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/multiply_simple_expl.xml @@ -1,23 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/negate_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/negate_expl.xml index 33a63e1f03..d6a0e606e1 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/negate_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/negate_expl.xml @@ -1,23 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/r_subtract_simple_exl.xml b/lib/iris/tests/results/coord_api/coord_maths/r_subtract_simple_exl.xml index 95b9efef7a..30cae91542 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/r_subtract_simple_exl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/r_subtract_simple_exl.xml @@ -1,23 +1,27 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/right_divide_simple_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/right_divide_simple_expl.xml index 00e932cf21..903f3327ea 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/right_divide_simple_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/right_divide_simple_expl.xml @@ -1,25 +1,25 @@ - + diff --git a/lib/iris/tests/results/coord_api/coord_maths/subtract_simple_expl.xml b/lib/iris/tests/results/coord_api/coord_maths/subtract_simple_expl.xml index 7f9ba5e097..b3416928cc 100644 --- a/lib/iris/tests/results/coord_api/coord_maths/subtract_simple_expl.xml +++ b/lib/iris/tests/results/coord_api/coord_maths/subtract_simple_expl.xml @@ -1,23 +1,27 @@ - + diff --git a/lib/iris/tests/results/coord_api/intersection.xml b/lib/iris/tests/results/coord_api/intersection.xml index 5c48c52872..e013f7c450 100644 --- a/lib/iris/tests/results/coord_api/intersection.xml +++ b/lib/iris/tests/results/coord_api/intersection.xml @@ -1,9 +1,9 @@ - + diff --git a/lib/iris/tests/results/coord_api/intersection_missing.xml b/lib/iris/tests/results/coord_api/intersection_missing.xml index 6336180833..ec0c3ea1a1 100644 --- a/lib/iris/tests/results/coord_api/intersection_missing.xml +++ b/lib/iris/tests/results/coord_api/intersection_missing.xml @@ -1,8 +1,8 @@ - + diff --git a/lib/iris/tests/results/coord_api/intersection_reversed.xml b/lib/iris/tests/results/coord_api/intersection_reversed.xml index b489f95451..41476a8e12 100644 --- a/lib/iris/tests/results/coord_api/intersection_reversed.xml +++ b/lib/iris/tests/results/coord_api/intersection_reversed.xml @@ -1,9 +1,9 @@ + [27.75, 24.75], + [24.75, 21.75], + [21.75, 18.75], + [18.75, 15.75], + [15.75, 12.75], + [12.75, 9.75], + [ 9.75, 6.75]]" id="43cd7f4a" long_name="foo" points="[30., 27., 24., 21., 18., 15., 12., 9.]" shape="(8,)" units="Unit('meter')" value_type="float32"/> diff --git a/lib/iris/tests/results/coord_api/nd_bounds.cml b/lib/iris/tests/results/coord_api/nd_bounds.cml index 
5c416af25b..76a5d7b766 100644 --- a/lib/iris/tests/results/coord_api/nd_bounds.cml +++ b/lib/iris/tests/results/coord_api/nd_bounds.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml index 463339e5bc..6a08df021c 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,75 +14,88 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml index a91ea4ce5c..b1c0e8cfbe 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,75 +14,88 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml index f963658910..0e602c0539 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,37 +14,38 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml index 195757a417..c0a0bc52f0 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,37 +14,38 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml index c63c260d25..393215ba2d 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,81 +14,93 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml index d6cc708aa1..5910dc45d5 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,81 +14,93 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml index 23739a1ac5..55da14e115 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,75 +14,88 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml index 817b855512..a264c602db 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,75 +14,88 @@ - + - + - + - + - + diff --git 
a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml index 29d59ce111..bb4e1fb049 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml @@ -6,45 +6,46 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml index e99d57b816..9569277431 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml @@ -6,45 +6,46 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml index 8e57ec7258..d80344dfde 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml @@ -6,89 +6,101 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml index 67b706e0ae..69e0634b40 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml @@ -6,89 +6,101 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml index d9c1b2a35c..e8d6a5f86f 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,37 +14,38 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml index ceafb3fc67..7c7559fa8a 100644 --- 
a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,37 +14,38 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml index e5090a3572..f6e7356385 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml @@ -6,45 +6,46 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml index 9e8bdebd4a..e6aff79093 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml @@ -6,45 +6,46 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml index a4e0cc1445..66a68b9323 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml @@ -6,51 +6,51 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml index d442637062..c0c3457e8c 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml @@ -6,51 +6,51 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/original.cml b/lib/iris/tests/results/cube_collapsed/original.cml index 4bc6553dba..9f4cf63b69 100644 --- a/lib/iris/tests/results/cube_collapsed/original.cml +++ b/lib/iris/tests/results/cube_collapsed/original.cml @@ -6,97 +6,110 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + 
[-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml index 788d0d8029..ae104c7b75 100644 --- a/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,81 +14,93 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml index b9b74c6b6d..34764da436 100644 --- a/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml @@ -6,7 +6,7 @@ - + @@ -14,81 +14,93 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml index 84b4fea150..5badd06034 100644 --- a/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml @@ -6,89 +6,101 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml index 128d29a281..4e2ee75b9f 100644 --- a/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml @@ -6,89 +6,101 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml index 8c206fe840..100a2c6c5f 100644 --- a/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml @@ -6,51 +6,51 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml index 08dc52fca2..99a8f94200 100644 --- a/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml @@ -6,51 +6,51 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., 
+ [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml b/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml index 5fae922867..4e132ccdcf 100644 --- a/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml +++ b/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml @@ -6,7 +6,7 @@ - + @@ -14,36 +14,36 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml b/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml index 454bd29a18..9157d71330 100644 --- a/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml +++ b/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml @@ -6,44 +6,44 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_io/pickling/cubelist.cml b/lib/iris/tests/results/cube_io/pickling/cubelist.cml index eb839e36e4..c52486b1d0 100644 --- a/lib/iris/tests/results/cube_io/pickling/cubelist.cml +++ b/lib/iris/tests/results/cube_io/pickling/cubelist.cml @@ -8,499 +8,522 @@ - + - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + @@ -509,8 +532,9 @@ - + @@ -524,39 +548,41 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + diff --git a/lib/iris/tests/results/cube_io/pickling/single_cube.cml b/lib/iris/tests/results/cube_io/pickling/single_cube.cml index a025713766..eb3e9d0112 100644 --- a/lib/iris/tests/results/cube_io/pickling/single_cube.cml +++ b/lib/iris/tests/results/cube_io/pickling/single_cube.cml @@ -8,499 +8,522 @@ - + - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + @@ -509,8 +532,9 @@ - + diff --git a/lib/iris/tests/results/cube_io/pickling/theta.cml b/lib/iris/tests/results/cube_io/pickling/theta.cml index 6c69f6ed54..2af2124729 100644 --- a/lib/iris/tests/results/cube_io/pickling/theta.cml +++ b/lib/iris/tests/results/cube_io/pickling/theta.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_io/pp/load/global.cml b/lib/iris/tests/results/cube_io/pp/load/global.cml index a69e633e26..a013add0cb 100644 --- a/lib/iris/tests/results/cube_io/pp/load/global.cml +++ 
b/lib/iris/tests/results/cube_io/pp/load/global.cml @@ -7,37 +7,63 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_merge/multidim_coord_merge.cml b/lib/iris/tests/results/cube_merge/multidim_coord_merge.cml index 0767f41844..7558d3ccda 100644 --- a/lib/iris/tests/results/cube_merge/multidim_coord_merge.cml +++ b/lib/iris/tests/results/cube_merge/multidim_coord_merge.cml @@ -3,40 +3,40 @@ - + [[10, 20], + [20, 25], + [25, 40], + [40, 60]]]" id="434cbbd8" long_name="bar" points="[[ 2.5, 7.5, 12.5, 17.5], + [10. , 17.5, 27.5, 42.5], + [15. , 22.5, 32.5, 50. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> - + [[ -5, 10], + [ 10, 18], + [ 18, 55], + [ 18, 70]]]" id="b0d35dcf" long_name="foo" points="[[ -7.5, 7.5, 22.5, 37.5], + [-12.5, 4. , 26.5, 47.5], + [ 2.5, 14. , 36.5, 44. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_merge/multidim_coord_merge_transpose.cml b/lib/iris/tests/results/cube_merge/multidim_coord_merge_transpose.cml index c649dc8569..909e48d08f 100644 --- a/lib/iris/tests/results/cube_merge/multidim_coord_merge_transpose.cml +++ b/lib/iris/tests/results/cube_merge/multidim_coord_merge_transpose.cml @@ -3,40 +3,40 @@ - + [[10, 20], + [20, 25], + [25, 40], + [40, 60]]]" id="434cbbd8" long_name="bar" points="[[ 2.5, 7.5, 12.5, 17.5], + [10. , 17.5, 27.5, 42.5], + [15. , 22.5, 32.5, 50. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> - + [[ -5, 10], + [ 10, 18], + [ 18, 55], + [ 18, 70]]]" id="b0d35dcf" long_name="foo" points="[[ -7.5, 7.5, 22.5, 37.5], + [-12.5, 4. , 26.5, 47.5], + [ 2.5, 14. , 36.5, 44. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_merge/test_orig_point_cube.cml b/lib/iris/tests/results/cube_merge/test_orig_point_cube.cml index b49da17f26..6c04a1c0fa 100644 --- a/lib/iris/tests/results/cube_merge/test_orig_point_cube.cml +++ b/lib/iris/tests/results/cube_merge/test_orig_point_cube.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_merge/test_simple_attributes1.cml b/lib/iris/tests/results/cube_merge/test_simple_attributes1.cml index 50bf6bea91..1df0771f16 100644 --- a/lib/iris/tests/results/cube_merge/test_simple_attributes1.cml +++ b/lib/iris/tests/results/cube_merge/test_simple_attributes1.cml @@ -6,10 +6,10 @@ - + - + @@ -21,10 +21,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_merge/test_simple_attributes2.cml b/lib/iris/tests/results/cube_merge/test_simple_attributes2.cml index 27d3f573e1..9d5d5a2fbb 100644 --- a/lib/iris/tests/results/cube_merge/test_simple_attributes2.cml +++ b/lib/iris/tests/results/cube_merge/test_simple_attributes2.cml @@ -6,10 +6,10 @@ - + - + @@ -24,10 +24,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_merge/test_simple_attributes3.cml b/lib/iris/tests/results/cube_merge/test_simple_attributes3.cml index 096ceeee50..1d90376068 100644 --- a/lib/iris/tests/results/cube_merge/test_simple_attributes3.cml +++ b/lib/iris/tests/results/cube_merge/test_simple_attributes3.cml @@ -6,10 +6,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_merge/test_simple_bound_merge.cml b/lib/iris/tests/results/cube_merge/test_simple_bound_merge.cml index d39cf419ae..51c76d9ba0 100644 --- a/lib/iris/tests/results/cube_merge/test_simple_bound_merge.cml +++ b/lib/iris/tests/results/cube_merge/test_simple_bound_merge.cml @@ -3,15 +3,15 @@ - + - + diff --git a/lib/iris/tests/results/cube_merge/test_simple_merge.cml b/lib/iris/tests/results/cube_merge/test_simple_merge.cml 
index 4a89d8044c..84d5a262df 100644 --- a/lib/iris/tests/results/cube_merge/test_simple_merge.cml +++ b/lib/iris/tests/results/cube_merge/test_simple_merge.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml b/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml index f272cebeb1..cbc7bdebd9 100644 --- a/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml +++ b/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml @@ -3,66 +3,66 @@ - + - + + [17, 16], + [15, 14], + [11, 10], + [ 9, 8], + [ 7, 6], + [ 5, 4], + [ 3, 2], + [ 1, 0]]" id="e4dc1958" long_name="dim2" points="[9, 8, 7, 5, 4, 3, 2, 1, 0]" shape="(9,)" units="Unit('meters')" value_type="int32"/> - + [[196, 197, 198, 199], + [192, 193, 194, 195], + [188, 189, 190, 191], + ..., + [168, 169, 170, 171], + [164, 165, 166, 167], + [160, 161, 162, 163]]]" id="b5af630a" long_name="my_multi_dim_coord" points="[[ 9, 8, 7, 5, 4, 3, 2, 1, 0], + [19, 18, 17, 15, 14, 13, 12, 11, 10], + [29, 28, 27, 25, 24, 23, 22, 21, 20], + [39, 38, 37, 35, 34, 33, 32, 31, 30], + [49, 48, 47, 45, 44, 43, 42, 41, 40]]" shape="(5, 9)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/cube_slice/2d_orig.cml b/lib/iris/tests/results/cube_slice/2d_orig.cml index 1956d9f324..3758fab7b4 100644 --- a/lib/iris/tests/results/cube_slice/2d_orig.cml +++ b/lib/iris/tests/results/cube_slice/2d_orig.cml @@ -3,67 +3,67 @@ - + - + - + - + [[160, 161, 162, 163], + [164, 165, 166, 167], + [168, 169, 170, 171], + ..., + [188, 189, 190, 191], + [192, 193, 194, 195], + [196, 197, 198, 199]]]" id="b5af630a" long_name="my_multi_dim_coord" points="[[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9], + [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], + [20, 21, 22, 23, 24, 25, 26, 27, 28, 29], + [30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49]]" shape="(5, 10)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/cube_slice/2d_to_0d_cube_slice.cml b/lib/iris/tests/results/cube_slice/2d_to_0d_cube_slice.cml index 2f1a4b80c1..be35d87cfd 100644 --- a/lib/iris/tests/results/cube_slice/2d_to_0d_cube_slice.cml +++ b/lib/iris/tests/results/cube_slice/2d_to_0d_cube_slice.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice.cml b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice.cml index 5c08226ea8..8018fb4a85 100644 --- a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice.cml +++ b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice.cml @@ -3,18 +3,18 @@ - + - + + [2, 3]]" id="e4dc1958" long_name="dim2" points="[0, 1]" shape="(2,)" units="Unit('meters')" value_type="int32"/> + [4, 5, 6, 7]]" id="b5af630a" long_name="my_multi_dim_coord" points="[0, 1]" shape="(2,)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice2.cml b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice2.cml index b6e6bf10e8..34e5984727 100644 --- a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice2.cml +++ b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice2.cml @@ -3,25 +3,25 @@ - + - + + [2, 3], + [6, 7]]" id="e4dc1958" long_name="dim2" points="[0, 1, 3]" shape="(3,)" units="Unit('meters')" value_type="int32"/> - + [[80, 81, 82, 83], + [84, 85, 86, 87], + [92, 93, 94, 95]]]" id="b5af630a" long_name="my_multi_dim_coord" points="[[ 0, 1, 3], + [20, 21, 23]]" shape="(2, 3)" units="Unit('1')" value_type="int32"/> diff --git 
a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice3.cml b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice3.cml index ee69b13b5c..65fc3f97ae 100644 --- a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice3.cml +++ b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_multi_slice3.cml @@ -3,46 +3,46 @@ - + - + - + - + [[ 80, 81, 82, 83], + [ 84, 85, 86, 87], + [ 88, 89, 90, 91], + [ 92, 93, 94, 95], + [ 96, 97, 98, 99], + [100, 101, 102, 103], + [104, 105, 106, 107], + [108, 109, 110, 111], + [112, 113, 114, 115], + [116, 117, 118, 119]]]" id="b5af630a" long_name="my_multi_dim_coord" points="[[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9], + [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]]" shape="(2, 10)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_slice.cml b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_slice.cml index f3fcc747d7..7cecb9db56 100644 --- a/lib/iris/tests/results/cube_slice/2d_to_1d_cube_slice.cml +++ b/lib/iris/tests/results/cube_slice/2d_to_1d_cube_slice.cml @@ -3,34 +3,34 @@ - + - + - + - + diff --git a/lib/iris/tests/results/cube_slice/2d_to_2d_revesed.cml b/lib/iris/tests/results/cube_slice/2d_to_2d_revesed.cml index 570f09dee3..fad5a7baf2 100644 --- a/lib/iris/tests/results/cube_slice/2d_to_2d_revesed.cml +++ b/lib/iris/tests/results/cube_slice/2d_to_2d_revesed.cml @@ -3,67 +3,67 @@ - + - + + [17, 16], + [15, 14], + [13, 12], + [11, 10], + [ 9, 8], + [ 7, 6], + [ 5, 4], + [ 3, 2], + [ 1, 0]]" id="e4dc1958" long_name="dim2" points="[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]" shape="(10,)" units="Unit('meters')" value_type="int32"/> + [[ 36, 37, 38, 39], + [ 32, 33, 34, 35], + [ 28, 29, 30, 31], + ..., + [ 8, 9, 10, 11], + [ 4, 5, 6, 7], + [ 0, 1, 2, 3]]]" id="b5af630a" long_name="my_multi_dim_coord" points="[[49, 48, 47, 46, 45, 44, 43, 42, 41, 40], + [39, 38, 37, 36, 35, 34, 33, 32, 31, 30], + [29, 28, 27, 26, 25, 24, 23, 22, 21, 20], + [19, 18, 17, 16, 15, 14, 13, 12, 11, 10], + [ 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]" shape="(5, 10)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/cube_slice/2d_transposed.cml b/lib/iris/tests/results/cube_slice/2d_transposed.cml index 542f2c0801..1f00c1ccda 100644 --- a/lib/iris/tests/results/cube_slice/2d_transposed.cml +++ b/lib/iris/tests/results/cube_slice/2d_transposed.cml @@ -3,67 +3,67 @@ - + - + - + - + [[160, 161, 162, 163], + [164, 165, 166, 167], + [168, 169, 170, 171], + ..., + [188, 189, 190, 191], + [192, 193, 194, 195], + [196, 197, 198, 199]]]" id="b5af630a" long_name="my_multi_dim_coord" points="[[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9], + [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], + [20, 21, 22, 23, 24, 25, 26, 27, 28, 29], + [30, 31, 32, 33, 34, 35, 36, 37, 38, 39], + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49]]" shape="(5, 10)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing1.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing1.cml index f674c30121..bb2e16525e 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing1.cml +++ b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing1.cml @@ -8,10 +8,10 @@ - + - + @@ -24,7 +24,7 @@ - + diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml index b1bf424a93..da14899cfd 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml +++ 
b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml @@ -8,10 +8,10 @@ - + - + @@ -19,12 +19,12 @@ - + - + diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml index 50fd683cb3..f01685ebb9 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml +++ b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml @@ -8,10 +8,10 @@ - + - + @@ -19,12 +19,12 @@ - + - + diff --git a/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml b/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml index 1563dce74d..a95e3d7acb 100644 --- a/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml +++ b/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml @@ -8,24 +8,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml b/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml index 5b7d800716..d6f3e7902e 100644 --- a/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml +++ b/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml @@ -3,20 +3,20 @@ - + - + - + - + diff --git a/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml b/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml index edf4392d30..462ff0a5dd 100644 --- a/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml +++ b/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml @@ -3,12 +3,12 @@ - + - + diff --git a/lib/iris/tests/results/derived/column.cml b/lib/iris/tests/results/derived/column.cml index 827214dafa..db2ffbfb1f 100644 --- a/lib/iris/tests/results/derived/column.cml +++ b/lib/iris/tests/results/derived/column.cml @@ -6,33 +6,43 @@ - + - + @@ -40,78 +50,91 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/derived/no_orog.cml b/lib/iris/tests/results/derived/no_orog.cml index 844373675e..7588124c06 100644 --- a/lib/iris/tests/results/derived/no_orog.cml +++ b/lib/iris/tests/results/derived/no_orog.cml @@ -6,137 +6,161 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/derived/removed_derived_coord.cml b/lib/iris/tests/results/derived/removed_derived_coord.cml index 5175d88875..b2883fab51 100644 --- a/lib/iris/tests/results/derived/removed_derived_coord.cml +++ b/lib/iris/tests/results/derived/removed_derived_coord.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/derived/removed_orog.cml b/lib/iris/tests/results/derived/removed_orog.cml index 982e38fd1e..07e9060747 100644 --- a/lib/iris/tests/results/derived/removed_orog.cml +++ b/lib/iris/tests/results/derived/removed_orog.cml @@ -6,123 +6,146 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" 
standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/derived/removed_sigma.cml b/lib/iris/tests/results/derived/removed_sigma.cml index 3908c22188..4cb3d3afcf 100644 --- a/lib/iris/tests/results/derived/removed_sigma.cml +++ b/lib/iris/tests/results/derived/removed_sigma.cml @@ -6,463 +6,532 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + diff --git a/lib/iris/tests/results/derived/transposed.cml b/lib/iris/tests/results/derived/transposed.cml index c44857bd61..d938494204 100644 --- a/lib/iris/tests/results/derived/transposed.cml +++ b/lib/iris/tests/results/derived/transposed.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/basic_orthogonal_cube.cml b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/basic_orthogonal_cube.cml index f06d8d7970..47d9b0388f 100644 --- a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/basic_orthogonal_cube.cml +++ b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/basic_orthogonal_cube.cml @@ -3,14 +3,14 @@ - + - + @@ -22,7 +22,7 @@ - + diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed.cml b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed.cml index b2b33c4276..a725dbd6f2 100644 --- a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed.cml +++ b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed.cml @@ -3,14 +3,14 @@ - + - + @@ -22,7 +22,7 @@ - + diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed_2.cml b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed_2.cml index 422826f7fd..faadbb0a8c 100644 --- a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed_2.cml +++ b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_1d_squashed_2.cml @@ -3,14 +3,14 @@ - + - + @@ -22,7 +22,7 @@ - + diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml index c7200d6106..d9750d1593 100644 --- a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml +++ b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml @@ -3,10 +3,10 @@ - + @@ -28,7 +28,7 @@ - + diff --git 
a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml index cc9deb4260..a0ed65bd29 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml @@ -6,61 +6,61 @@ - + [[457.2669 , 495.58197], + [462.27734, 500.56924], + [459.3417 , 497.6472 ], + ..., + [408.93216, 447.471 ], + [390.02432, 428.65073], + [363.4833 , 402.23257]]]" id="9041e969" points="[[372.66827, 377.73013, 374.7644 , ..., + 323.83752, 304.73566, 277.92227], + [388.6288 , 393.68097, 390.72092, ..., + 339.89185, 320.8267 , 294.0648 ], + [410.97473, 416.0133 , 413.0612 , ..., + 362.3689 , 343.35504, 316.66516], + [439.70715, 444.72824, 441.78638, ..., + 391.26965, 372.32166, 345.7242 ], + [474.8275 , 479.82733, 476.89795, ..., + 426.5954 , 407.72775, 381.24307]]" shape="(5, 18)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + @@ -68,38 +68,39 @@ - + - + @@ -113,20 +114,22 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml index fb3d2cdbcf..efe8c37e2c 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml @@ -6,99 +6,98 @@ - + [[474.23618, 512.47266], + [465.1682 , 503.44672], + [422.9535 , 461.42743], + ..., + [488.32468, 526.496 ], + [500.70358, 538.81757], + [473.8021 , 512.04065]]]" id="9041e969" points="[[389.81168, 380.65067, 338.00275, 295.52823, + 278.92233, 280.88892, 327.14597, 342.9571 , + 368.656 , 404.04477, 416.55072, 389.37317], + [405.73932, 396.5959 , 354.02988, 311.63693, + 295.06293, 297.02573, 343.19394, 358.9747 , + 384.62427, 419.94507, 432.427 , 405.30164], + [428.03918, 418.92038, 376.46887, 334.19003, + 317.6606 , 319.61813, 365.6621 , 381.40042, + 406.98096, 442.20673, 454.65506, 427.60266], + [456.71246, 447.62524, 405.3208 , 363.1884 , + 346.71622, 348.66696, 394.55145, 410.23526, + 435.72717, 470.83093, 483.23618, 456.27747], + [491.76077, 482.71207, 440.58698, 398.63318, + 382.23087, 384.17334, 429.8633 , 445.4806 , + 470.86444, 505.81937, 518.172 , 491.3276 ]]" shape="(5, 12)" standard_name="altitude" units="Unit('m')" value_type="float32"> - + - + - + - + @@ -112,19 +111,20 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/higher.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/higher.cml index 6c7779ff9f..7cfcf8b91f 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/higher.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/higher.cml @@ -3,41 +3,42 @@ - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml index 
70df0e198d..efe348ffcb 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml @@ -6,394 +6,398 @@ - + - + - + - + - + @@ -407,29 +411,30 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/latlonreduced.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/latlonreduced.cml index 803a54ce67..80c60cc244 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/latlonreduced.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/latlonreduced.cml @@ -3,14 +3,14 @@ - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lonhalved.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lonhalved.cml index fb937c1729..a5dbd5ead9 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lonhalved.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lonhalved.cml @@ -3,15 +3,15 @@ - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lower.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lower.cml index c324683476..c32ee6bf0f 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lower.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/lower.cml @@ -3,15 +3,15 @@ - + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/simple.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/simple.cml index fe54eb19cf..25850a36d9 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/simple.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/simple.cml @@ -3,17 +3,17 @@ - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml index 7422bfe044..8acc98033b 100644 --- a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml @@ -14,27 +14,27 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml index f9e0511ccb..68d933dd4d 100644 --- a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml @@ -14,46 +14,44 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml index 2fb8b6e1f0..9c34b32abf 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml @@ -14,38 +14,37 @@ - + - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml 
b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml index 9a819eee9e..4ba4104fb4 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml @@ -14,38 +14,38 @@ - + - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml index 9133d98e73..a89407d6a2 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml @@ -14,27 +14,27 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml index 05aeab9ccb..d2efe47816 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml @@ -14,27 +14,27 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml index 9dc3e08ee6..69e3233576 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml @@ -14,27 +14,27 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml index 7bb47c5296..defd705298 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml @@ -14,27 +14,27 @@ - + - + - + diff --git a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml index 8ccd602c11..3d6068393e 100644 --- a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml +++ b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml @@ -14,27 +14,27 @@ - - - - + + + + - + @@ -65,27 +65,27 @@ - - - - + + + + - + @@ -116,27 +116,27 @@ - - - - + + + + - + @@ -167,27 +167,27 @@ - - - - + + + + - + @@ -218,27 +218,27 @@ - - - - + + + + - + @@ -269,27 +269,27 @@ - - - - + + + + - + @@ -320,27 +320,27 @@ - - - - + + + + - + @@ -371,27 +371,27 @@ - - - - + + + + - + @@ -422,27 +422,27 @@ - - - - + + + + - + @@ -473,27 +473,27 @@ - - - - + + + + - + @@ -524,27 +524,27 @@ - - - - + + + + - + @@ -575,27 +575,27 @@ - - - - + + + + - + @@ -626,27 +626,27 @@ - - - - + + + + - + @@ -677,27 +677,27 @@ - - - - + + + + - + @@ -728,27 +728,27 @@ - - - - + + + + - + @@ -779,27 +779,27 @@ - - - - + + + + - + @@ -830,27 +830,27 @@ - - - - + + + + - + @@ -881,27 +881,27 @@ - - - - + + + + - + diff --git a/lib/iris/tests/results/file_load/known_loaders.txt b/lib/iris/tests/results/file_load/known_loaders.txt index 9b0a074574..98ac3e4a07 100644 --- a/lib/iris/tests/results/file_load/known_loaders.txt +++ b/lib/iris/tests/results/file_load/known_loaders.txt @@ -4,6 +4,7 @@ * NetCDF 64 bit offset format (priority 5) * NetCDF_v4 (priority 5) * UM Post Processing file (PP) (priority 5) + * NetCDF dataset (priority 4) * UM Fieldsfile (FF) post v5.2 (priority 4) * ABF (priority 3) * ABL (priority 3) diff --git a/lib/iris/tests/results/file_load/theta_levels.cml b/lib/iris/tests/results/file_load/theta_levels.cml index fc708b7949..ce89181db6 100644 --- a/lib/iris/tests/results/file_load/theta_levels.cml +++ 
b/lib/iris/tests/results/file_load/theta_levels.cml
[hunks: coordinate entries reformatted throughout, as in the other CML result files above]
diff --git a/lib/iris/tests/results/file_load/u_wind_levels.cml b/lib/iris/tests/results/file_load/u_wind_levels.cml
index 5d1af58f6c..c06f3f37ad 100644
--- a/lib/iris/tests/results/file_load/u_wind_levels.cml
+++ b/lib/iris/tests/results/file_load/u_wind_levels.cml
[hunks: coordinate entries reformatted throughout, as in the other CML result files above]
diff --git a/lib/iris/tests/results/file_load/v_wind_levels.cml b/lib/iris/tests/results/file_load/v_wind_levels.cml
index c7145a7e9e..59a8663235 100644
--- a/lib/iris/tests/results/file_load/v_wind_levels.cml
+++ b/lib/iris/tests/results/file_load/v_wind_levels.cml
[hunks: coordinate entries reformatted throughout, as in the other CML result files above]
diff --git a/lib/iris/tests/results/file_load/wind_levels.cml b/lib/iris/tests/results/file_load/wind_levels.cml
index 33584deec6..1458e62649 100644
--- a/lib/iris/tests/results/file_load/wind_levels.cml
+++ b/lib/iris/tests/results/file_load/wind_levels.cml
[hunks: coordinate entries reformatted throughout; continues below]
+ - + @@ -3471,10 +3538,10 @@ - + - + @@ -3492,26 +3559,27 @@ - + - + - + - + - + @@ -3523,10 +3591,10 @@ - + - + @@ -3544,26 +3612,27 @@ - + - + - + - + - + @@ -3575,10 +3644,10 @@ - + - + @@ -3596,26 +3665,27 @@ - + - + - + - + - + @@ -3627,10 +3697,10 @@ - + - + @@ -3648,26 +3718,27 @@ - + - + - + - + - + @@ -3679,10 +3750,10 @@ - + - + @@ -3700,26 +3771,27 @@ - + - + - + - + - + @@ -3731,10 +3803,10 @@ - + - + @@ -3752,26 +3824,27 @@ - + - + - + - + - + @@ -3783,10 +3856,10 @@ - + - + @@ -3804,26 +3877,27 @@ - + - + - + - + - + @@ -3835,10 +3909,10 @@ - + - + @@ -3856,26 +3930,27 @@ - + - + - + - + - + @@ -3887,10 +3962,10 @@ - + - + @@ -3908,26 +3983,27 @@ - + - + - + - + - + @@ -3939,10 +4015,10 @@ - + - + diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 92f0d8fc20..2313c25270 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -195,6 +195,7 @@ "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_default.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.0": "fa85978e837e68f094d3673089626ad792073985659a9b1a7a15b52869f19f56", "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.1": "ea95969c874a63d39ca3ad2a231cdbc9c4973631cd6336c633182cbc61c3d3f2", + "iris.tests.test_plot.TestPlotHist.test_cube.0": "b59cc3dadb433c24c4f16603943a793591a7c3dcb4dcbccc68c697a93b139131", "iris.tests.test_plot.TestPlotOtherCoordSystems.test_plot_tmerc.0": "e665326d999ecc92b399b32466269326b369cccccccd64d96199631364f33333", "iris.tests.test_plot.TestQuickplotPlot.test_t.0": "83ffb59a7f00e59a2205d9d6e4619a74d9388c8e884e8da799d30b6dddb47e00", "iris.tests.test_plot.TestQuickplotPlot.test_t_dates.0": "82fe958b7e046f89a0033bd4d9632c74d8799d3e8d8d826789e487b348dc2f69", @@ -216,6 +217,8 @@ "iris.tests.test_quickplot.TestLabels.test_pcolor.0": "eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80", "iris.tests.test_quickplot.TestLabels.test_pcolormesh.0": "eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80", "iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol.0": "eea16affc05ab500956e974ac53f3d80925ac03f3f80c07e3fa12d21c2ff3f80", + "iris.tests.test_quickplot.TestPlotHist.test_horizontal.0": "b59cc3dadb433c24c4f166039438793591a7dbdcbcdc9ccc68c697a91b139131", + "iris.tests.test_quickplot.TestPlotHist.test_vertical.0": "bf80c7c6c07d7959647e343a33364b699589c6c64ec0312b9e227ad681ffcc68", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": "fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": "ea856a95955a956ac17f950a807e3f4e951ac07e3f81c0ff3ea16aa1c0bd3e81", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl diff --git 
a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml similarity index 64% rename from lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml index 09d54a1b19..975488f656 100644 --- a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml +++ b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml @@ -8,28 +8,28 @@ + [[5343, 5396, 5449, 5502, 5555, 5608], + [5661, 5714, 5767, 5820, 5873, 5926], + [5979, 6032, 6085, 6138, 6191, 6244], + [6297, 6350, 6403, 6456, 6509, 6562], + [6615, 6668, 6721, 6774, 6827, 6880]]]" shape="(4, 5, 6)" standard_name="altitude" units="Unit('m')" value_type="int64"> @@ -52,10 +52,10 @@ + [106, 107, 108, 109, 110, 111], + [112, 113, 114, 115, 116, 117], + [118, 119, 120, 121, 122, 123], + [124, 125, 126, 127, 128, 129]]" shape="(5, 6)" standard_name="surface_altitude" units="Unit('m')" value_type="int64" var_name="surface_altitude"/> @@ -72,28 +72,28 @@ + [[53043, 53573, 54103, 54633, 55163, 55693], + [56223, 56753, 57283, 57813, 58343, 58873], + 
[59403, 59933, 60463, 60993, 61523, 62053], + [62583, 63113, 63643, 64173, 64703, 65233], + [65763, 66293, 66823, 67353, 67883, 68413]]]" shape="(4, 5, 6)" standard_name="altitude" units="Unit('m')" value_type="int64"> @@ -116,10 +116,10 @@ + [1060, 1070, 1080, 1090, 1100, 1110], + [1120, 1130, 1140, 1150, 1160, 1170], + [1180, 1190, 1200, 1210, 1220, 1230], + [1240, 1250, 1260, 1270, 1280, 1290]]" shape="(5, 6)" units="Unit('m')" value_type="int64" var_name="surface_altitude_0"/> diff --git a/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/basic_save.cdl b/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/basic_save.cdl new file mode 100644 index 0000000000..133c886d87 --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/basic_save.cdl @@ -0,0 +1,34 @@ +dimensions: + latitude = 181 ; + levelist = 60 ; + longitude = 360 ; + time = 1 ; +variables: + double co2(time, levelist, latitude, longitude) ; + co2:long_name = "Carbon Dioxide" ; + co2:units = "kg kg**-1" ; + int time(time) ; + time:axis = "T" ; + time:units = "hours since 1900-01-01 00:00:0.0" ; + time:standard_name = "time" ; + time:long_name = "time" ; + time:calendar = "standard" ; + int levelist(levelist) ; + levelist:long_name = "model_level_number" ; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + latitude:long_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + longitude:long_name = "longitude" ; + double lnsp(time, levelist, latitude, longitude) ; + lnsp:long_name = "Logarithm of surface pressure" ; + +// global attributes: + :history = "2009-08-25 13:46:31 GMT by mars2netcdf-0.92" ; + :Conventions = "CF-1.7" ; +} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/path_string_save_same.cdl b/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/path_string_save_same.cdl new file mode 100644 index 0000000000..133c886d87 --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/path_string_save_same.cdl @@ -0,0 +1,34 @@ +dimensions: + latitude = 181 ; + levelist = 60 ; + longitude = 360 ; + time = 1 ; +variables: + double co2(time, levelist, latitude, longitude) ; + co2:long_name = "Carbon Dioxide" ; + co2:units = "kg kg**-1" ; + int time(time) ; + time:axis = "T" ; + time:units = "hours since 1900-01-01 00:00:0.0" ; + time:standard_name = "time" ; + time:long_name = "time" ; + time:calendar = "standard" ; + int levelist(levelist) ; + levelist:long_name = "model_level_number" ; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + latitude:long_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + longitude:long_name = "longitude" ; + double lnsp(time, levelist, latitude, longitude) ; + lnsp:long_name = "Logarithm of surface pressure" ; + +// global attributes: + :history = "2009-08-25 13:46:31 GMT by mars2netcdf-0.92" ; + :Conventions = "CF-1.7" ; +} diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl similarity index 100% rename from 
lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl diff --git a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple.cml b/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple.cml index bac3f9b503..d6d0610e1f 100644 --- a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple.cml +++ b/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple.cml @@ -8,41 +8,41 @@ - + - + - + - + - + @@ -54,12 +54,12 @@ - + - + diff --git a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple_callback.cml b/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple_callback.cml index 971f76d618..3bd43f0468 100644 --- a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple_callback.cml +++ b/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadFF/simple_callback.cml @@ -9,41 +9,41 @@ - + - + - + - + - + @@ -55,12 +55,12 @@ - + - + diff --git a/lib/iris/tests/results/iterate/izip_nd_ortho.cml b/lib/iris/tests/results/iterate/izip_nd_ortho.cml index 1b8fded247..c489067dd0 100644 --- a/lib/iris/tests/results/iterate/izip_nd_ortho.cml +++ b/lib/iris/tests/results/iterate/izip_nd_ortho.cml @@ -3,18 +3,18 @@ - + - + @@ -26,18 +26,18 @@ - + - + @@ -49,18 +49,18 @@ - + - + @@ -72,18 +72,18 @@ - + - + @@ -95,18 +95,18 @@ - + - + @@ -118,18 +118,18 @@ - + - + @@ -141,18 +141,18 @@ - + - + @@ -164,18 +164,18 @@ - + - + @@ -187,18 +187,18 @@ - + - + @@ -210,18 +210,18 @@ - + - + diff --git a/lib/iris/tests/results/merge/a_aux_b_aux.cml b/lib/iris/tests/results/merge/a_aux_b_aux.cml index 88af58cbe7..7701d3f461 100644 --- a/lib/iris/tests/results/merge/a_aux_b_aux.cml +++ 
b/lib/iris/tests/results/merge/a_aux_b_aux.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/a_aux_b_dim.cml b/lib/iris/tests/results/merge/a_aux_b_dim.cml index 88af58cbe7..7701d3f461 100644 --- a/lib/iris/tests/results/merge/a_aux_b_dim.cml +++ b/lib/iris/tests/results/merge/a_aux_b_dim.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/a_dim_b_aux.cml b/lib/iris/tests/results/merge/a_dim_b_aux.cml index 88af58cbe7..7701d3f461 100644 --- a/lib/iris/tests/results/merge/a_dim_b_aux.cml +++ b/lib/iris/tests/results/merge/a_dim_b_aux.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/a_dim_b_dim.cml b/lib/iris/tests/results/merge/a_dim_b_dim.cml index 88af58cbe7..7701d3f461 100644 --- a/lib/iris/tests/results/merge/a_dim_b_dim.cml +++ b/lib/iris/tests/results/merge/a_dim_b_dim.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/dec.cml b/lib/iris/tests/results/merge/dec.cml index 4efd40910f..6d72d19917 100644 --- a/lib/iris/tests/results/merge/dec.cml +++ b/lib/iris/tests/results/merge/dec.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + @@ -148,129 +159,140 @@ - + - + - + - + - + - + - + - + @@ -288,130 +310,140 @@ - + - + - + - + - + - + - + - + @@ -429,130 +461,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/merge/separable_combination.cml b/lib/iris/tests/results/merge/separable_combination.cml index 25802c8228..db14b667e3 100644 --- a/lib/iris/tests/results/merge/separable_combination.cml +++ b/lib/iris/tests/results/merge/separable_combination.cml @@ -3,45 +3,47 @@ - + - + - + - + diff --git a/lib/iris/tests/results/merge/single_split.cml b/lib/iris/tests/results/merge/single_split.cml index fffe59fe02..afa0fd0b6e 100644 --- a/lib/iris/tests/results/merge/single_split.cml +++ b/lib/iris/tests/results/merge/single_split.cml @@ -10,8 +10,8 @@ + [2, 3], + [4, 5]]" shape="(3, 2)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/merge/string_a_b.cml b/lib/iris/tests/results/merge/string_a_b.cml index 48b46979c1..5d0304efd8 100644 --- a/lib/iris/tests/results/merge/string_a_b.cml +++ b/lib/iris/tests/results/merge/string_a_b.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/string_a_with_aux.cml b/lib/iris/tests/results/merge/string_a_with_aux.cml index 1f41f23aad..479cb28347 100644 --- a/lib/iris/tests/results/merge/string_a_with_aux.cml +++ b/lib/iris/tests/results/merge/string_a_with_aux.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/string_a_with_dim.cml b/lib/iris/tests/results/merge/string_a_with_dim.cml index 1f41f23aad..479cb28347 100644 --- a/lib/iris/tests/results/merge/string_a_with_dim.cml +++ b/lib/iris/tests/results/merge/string_a_with_dim.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/string_b_with_aux.cml b/lib/iris/tests/results/merge/string_b_with_aux.cml index 8710767c41..980e33b439 100644 --- a/lib/iris/tests/results/merge/string_b_with_aux.cml +++ b/lib/iris/tests/results/merge/string_b_with_aux.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/string_b_with_dim.cml b/lib/iris/tests/results/merge/string_b_with_dim.cml index 8710767c41..980e33b439 100644 --- a/lib/iris/tests/results/merge/string_b_with_dim.cml +++ b/lib/iris/tests/results/merge/string_b_with_dim.cml @@ -3,10 +3,10 @@ - + - + diff --git a/lib/iris/tests/results/merge/theta.cml b/lib/iris/tests/results/merge/theta.cml index 0e5b02be51..b9bf8ce411 100644 --- 
a/lib/iris/tests/results/merge/theta.cml +++ b/lib/iris/tests/results/merge/theta.cml @@ -8,129 +8,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/merge/theta_two_times.cml b/lib/iris/tests/results/merge/theta_two_times.cml index d1c9f59ace..991ac41da9 100644 --- a/lib/iris/tests/results/merge/theta_two_times.cml +++ b/lib/iris/tests/results/merge/theta_two_times.cml @@ -8,498 +8,520 @@ - + - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + @@ -508,7 +530,7 @@ - + diff --git a/lib/iris/tests/results/merge/time_triple_merging2.cml b/lib/iris/tests/results/merge/time_triple_merging2.cml index b838ad569a..6be51a0396 100644 --- a/lib/iris/tests/results/merge/time_triple_merging2.cml +++ b/lib/iris/tests/results/merge/time_triple_merging2.cml @@ -10,7 +10,7 @@ + [3, 4, 5]]" shape="(2, 3)" standard_name="time" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/merge/time_triple_merging4.cml b/lib/iris/tests/results/merge/time_triple_merging4.cml index 147bf1adf8..f508578e7f 100644 --- a/lib/iris/tests/results/merge/time_triple_merging4.cml +++ b/lib/iris/tests/results/merge/time_triple_merging4.cml @@ -10,8 +10,8 @@ + [1, 4], + [2, 5]]" shape="(3, 2)" standard_name="time" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/merge/time_triple_successive_forecasts.cml b/lib/iris/tests/results/merge/time_triple_successive_forecasts.cml index ff0d9cdad2..c8d5a993c3 100644 --- a/lib/iris/tests/results/merge/time_triple_successive_forecasts.cml +++ b/lib/iris/tests/results/merge/time_triple_successive_forecasts.cml @@ -10,8 +10,8 @@ + [11, 12, 13, 14], + [12, 13, 14, 15]]" shape="(3, 4)" standard_name="time" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/merge/time_triple_time_non_dim_coord.cml b/lib/iris/tests/results/merge/time_triple_time_non_dim_coord.cml index b3c2c26d96..76e45b4a3f 100644 --- a/lib/iris/tests/results/merge/time_triple_time_non_dim_coord.cml +++ b/lib/iris/tests/results/merge/time_triple_time_non_dim_coord.cml @@ -3,14 +3,14 @@ - + - + diff --git a/lib/iris/tests/results/merge/time_triple_time_vs_forecast.cml b/lib/iris/tests/results/merge/time_triple_time_vs_forecast.cml index f0d625671f..6bc573e6d5 100644 --- a/lib/iris/tests/results/merge/time_triple_time_vs_forecast.cml +++ b/lib/iris/tests/results/merge/time_triple_time_vs_forecast.cml @@ -6,10 +6,10 @@ - + diff --git a/lib/iris/tests/results/merge/time_triple_time_vs_ref_time.cml b/lib/iris/tests/results/merge/time_triple_time_vs_ref_time.cml index cef8a71d1f..0a85d5394c 100644 --- a/lib/iris/tests/results/merge/time_triple_time_vs_ref_time.cml +++ b/lib/iris/tests/results/merge/time_triple_time_vs_ref_time.cml @@ -4,9 +4,9 @@ + [3, 2, 1], + [4, 3, 2], + [5, 4, 3]]" shape="(4, 3)" standard_name="forecast_period" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/name/NAMEIII_field.cml b/lib/iris/tests/results/name/NAMEIII_field.cml index c419a2760d..0c88fcc7d5 100644 --- a/lib/iris/tests/results/name/NAMEIII_field.cml +++ b/lib/iris/tests/results/name/NAMEIII_field.cml @@ -25,33 +25,33 @@ + [50.612, 50.613], + [50.613, 50.614], + ..., + [50.808, 50.809], + [50.809, 50.81 ], + [50.81 , 50.811]]" id="77a50eb5" points="[50.6115, 
50.6125, 50.6135, ..., 50.8085, 50.8095, + 50.8105]" shape="(200,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> + [-3.6773, -3.6755], + [-3.6755, -3.6737], + ..., + [-3.3245, -3.3227], + [-3.3227, -3.3209], + [-3.3209, -3.3191]]" id="f913a8b3" points="[-3.6782, -3.6764, -3.6746, ..., -3.3236, -3.3218, + -3.32 ]" shape="(200,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"> - + - + @@ -90,33 +90,33 @@ + [50.612, 50.613], + [50.613, 50.614], + ..., + [50.808, 50.809], + [50.809, 50.81 ], + [50.81 , 50.811]]" id="77a50eb5" points="[50.6115, 50.6125, 50.6135, ..., 50.8085, 50.8095, + 50.8105]" shape="(200,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> + [-3.6773, -3.6755], + [-3.6755, -3.6737], + ..., + [-3.3245, -3.3227], + [-3.3227, -3.3209], + [-3.3209, -3.3191]]" id="f913a8b3" points="[-3.6782, -3.6764, -3.6746, ..., -3.3236, -3.3218, + -3.32 ]" shape="(200,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"> - + - + @@ -154,33 +154,33 @@ + [50.612, 50.613], + [50.613, 50.614], + ..., + [50.808, 50.809], + [50.809, 50.81 ], + [50.81 , 50.811]]" id="77a50eb5" points="[50.6115, 50.6125, 50.6135, ..., 50.8085, 50.8095, + 50.8105]" shape="(200,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> + [-3.6773, -3.6755], + [-3.6755, -3.6737], + ..., + [-3.3245, -3.3227], + [-3.3227, -3.3209], + [-3.3209, -3.3191]]" id="f913a8b3" points="[-3.6782, -3.6764, -3.6746, ..., -3.3236, -3.3218, + -3.32 ]" shape="(200,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"> - + - + @@ -218,33 +218,33 @@ + [50.612, 50.613], + [50.613, 50.614], + ..., + [50.808, 50.809], + [50.809, 50.81 ], + [50.81 , 50.811]]" id="77a50eb5" points="[50.6115, 50.6125, 50.6135, ..., 50.8085, 50.8095, + 50.8105]" shape="(200,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> + [-3.6773, -3.6755], + [-3.6755, -3.6737], + ..., + [-3.3245, -3.3227], + [-3.3227, -3.3209], + [-3.3209, -3.3191]]" id="f913a8b3" points="[-3.6782, -3.6764, -3.6746, ..., -3.3236, -3.3218, + -3.32 ]" shape="(200,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"> - + - + @@ -282,33 +282,33 @@ + [50.612, 50.613], + [50.613, 50.614], + ..., + [50.808, 50.809], + [50.809, 50.81 ], + [50.81 , 50.811]]" id="77a50eb5" points="[50.6115, 50.6125, 50.6135, ..., 50.8085, 50.8095, + 50.8105]" shape="(200,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> + [-3.6773, -3.6755], + [-3.6755, -3.6737], + ..., + [-3.3245, -3.3227], + [-3.3227, -3.3209], + [-3.3209, -3.3191]]" id="f913a8b3" points="[-3.6782, -3.6764, -3.6746, ..., -3.3236, -3.3218, + -3.32 ]" shape="(200,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"> - + - + diff --git a/lib/iris/tests/results/name/NAMEIII_timeseries.cml b/lib/iris/tests/results/name/NAMEIII_timeseries.cml index 3776bfc27f..1a40388a1f 100644 --- a/lib/iris/tests/results/name/NAMEIII_timeseries.cml +++ b/lib/iris/tests/results/name/NAMEIII_timeseries.cml @@ -35,33 +35,30 @@ - + - + @@ -106,33 +103,30 @@ - + - + @@ -176,33 +170,30 @@ - + - + @@ -246,33 +237,30 @@ - + - + @@ -316,33 +304,30 @@ - + - + diff --git a/lib/iris/tests/results/name/NAMEIII_trajectory.cml b/lib/iris/tests/results/name/NAMEIII_trajectory.cml index 20a0ec3b82..121b174169 100644 --- a/lib/iris/tests/results/name/NAMEIII_trajectory.cml +++ b/lib/iris/tests/results/name/NAMEIII_trajectory.cml @@ -10,36 +10,36 @@ - + - + - + - + - + - + + 
57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -55,36 +55,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -100,36 +100,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -145,36 +145,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -190,36 +190,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -235,36 +235,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -280,36 +280,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -325,36 +325,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -370,36 +370,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -415,36 +415,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -460,36 +460,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -505,36 +505,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -550,36 +550,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -595,36 +595,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" 
shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -640,36 +640,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -685,36 +685,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + @@ -730,36 +730,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/name/NAMEIII_trajectory0.cml b/lib/iris/tests/results/name/NAMEIII_trajectory0.cml index d337ca9454..63d092c385 100644 --- a/lib/iris/tests/results/name/NAMEIII_trajectory0.cml +++ b/lib/iris/tests/results/name/NAMEIII_trajectory0.cml @@ -10,36 +10,36 @@ - + - + - + - + - + - + + 57.13152, 57.03173]" shape="(836,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> + 34.37071, 34.42273]" shape="(836,)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/name/NAMEIII_version2.cml b/lib/iris/tests/results/name/NAMEIII_version2.cml index 0ad0c883a2..7f08b1be02 100644 --- a/lib/iris/tests/results/name/NAMEIII_version2.cml +++ b/lib/iris/tests/results/name/NAMEIII_version2.cml @@ -25,17 +25,17 @@ - + @@ -48,35 +48,34 @@ - + @@ -107,17 +106,17 @@ - + @@ -130,35 +129,34 @@ - + @@ -189,17 +187,17 @@ - + @@ -212,35 +210,34 @@ - + @@ -271,17 +268,17 @@ - + @@ -294,35 +291,34 @@ - + diff --git a/lib/iris/tests/results/name/NAMEII_field.cml b/lib/iris/tests/results/name/NAMEII_field.cml index 7d88c06eff..bd9e5af492 100644 --- a/lib/iris/tests/results/name/NAMEII_field.cml +++ b/lib/iris/tests/results/name/NAMEII_field.cml @@ -19,39 +19,38 @@ - + - + - + - + @@ -80,39 +79,38 @@ - + - + - + - + @@ -141,35 +139,34 @@ - + - + - + - + @@ -202,35 +199,34 @@ - + - + - + - + @@ -263,35 +259,34 @@ - + - + - + - + diff --git a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml index 9bc2c0d1ac..94d830d2df 100644 --- a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml +++ b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml @@ -20,21 +20,21 @@ + [30.0625, 30.1875]]" id="77a50eb5" points="[30. , 30.125]" shape="(2,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> - + - + - + diff --git a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml index 8d1ad620d0..88e611b8ae 100644 --- a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml +++ b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml @@ -20,21 +20,21 @@ + [30.0625, 30.1875]]" id="77a50eb5" points="[30. 
, 30.125]" shape="(2,)" standard_name="latitude" units="Unit('degrees')" value_type="float64"> - + - + - + diff --git a/lib/iris/tests/results/name/NAMEII_timeseries.cml b/lib/iris/tests/results/name/NAMEII_timeseries.cml index 39af8a6288..c3d5cb3e01 100644 --- a/lib/iris/tests/results/name/NAMEII_timeseries.cml +++ b/lib/iris/tests/results/name/NAMEII_timeseries.cml @@ -30,16 +30,16 @@ + [370345.5, 370346.5], + [370346.5, 370347.5], + ..., + [370473.5, 370474.5], + [370474.5, 370475.5], + [370475.5, 370476.5]]" id="cb784457" points="[370345., 370346., 370347., ..., 370474., 370475., + 370476.]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + @@ -79,16 +79,16 @@ + [370345.5, 370346.5], + [370346.5, 370347.5], + ..., + [370473.5, 370474.5], + [370474.5, 370475.5], + [370475.5, 370476.5]]" id="cb784457" points="[370345., 370346., 370347., ..., 370474., 370475., + 370476.]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml index e48cf41d2a..616b338a25 100644 --- a/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml @@ -10,6 +10,6 @@ - + diff --git a/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml index 78fec459e9..2a604d90fc 100644 --- a/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml @@ -10,6 +10,6 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml index 3847d5a417..55a9d96fb5 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml @@ -8,10 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml index 89ee5ac195..49fa36bdd0 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml @@ -8,10 +8,11 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml index b3c7709dae..59e6e6dd2a 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml @@ -8,10 +8,10 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml index ea5e42150e..8be80d076d 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml @@ -8,10 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml index b028ee6cf8..9801774f95 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml @@ -8,7 +8,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml index 76f66e1bc4..6c1509d970 100644 --- 
a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml @@ -8,16 +8,18 @@ - + - + + 929226, 929232, 929238, 929244, 929250, 929256, + 929262, 929268, 929274, 929280, 929286, 929292, + 929298, 929304]" shape="(20,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml index 133cc4f659..d2df8b571c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml @@ -8,14 +8,16 @@ - + - + + 929226, 929232, 929238, 929244]" shape="(10,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml index 1d7025751e..24765c4020 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml @@ -8,10 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml index 1f5a990bd4..6dceb8b067 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml @@ -8,10 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml index 9c32197e56..e7780ff856 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml @@ -8,10 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml index 100ab1257c..4118b82422 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml @@ -8,10 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml index 22a4ff1989..1dfd4ec937 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml @@ -20,69 +20,70 @@ - + - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml index fc6772e5f0..176bff2046 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml @@ -8,18 +8,20 @@ - + - + + 929226, 929232, 929238, 929244, 929250, 929256, + 929262, 929268, 929274, 929280, 929286, 929292, + 929298, 929304, 929310, 929316, 929322, 929328, + 929334, 929340, 929346, 929352, 929358, 929364, + 929370]" shape="(31,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml index 9d6b3c1e43..855c767206 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml @@ -7,17 +7,17 @@ - + - + - + @@ -33,17 +33,17 @@ - + - + - + 
diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml index 15ab300757..7e52524dba 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml @@ -7,17 +7,17 @@ - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml index 29ff3b9bd9..baeeec4a53 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml @@ -7,17 +7,17 @@ - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_laea.cml b/lib/iris/tests/results/netcdf/netcdf_laea.cml index 799f40522b..cf3f223b23 100644 --- a/lib/iris/tests/results/netcdf/netcdf_laea.cml +++ b/lib/iris/tests/results/netcdf/netcdf_laea.cml @@ -8,62 +8,68 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_lcc.cml b/lib/iris/tests/results/netcdf/netcdf_lcc.cml index 592c33d534..8239b1802b 100644 --- a/lib/iris/tests/results/netcdf/netcdf_lcc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_lcc.cml @@ -11,84 +11,77 @@ - + - + + 229500, 230500, 231500, 232500, 233500, 234500, + 235500, 236500, 237500, 238500, 239500, 240500, + 241500, 242500, 243500, 244500, 245500, 246500, + 247500, 248500, 249500, 250500, 251500, 252500, + 253500, 254500, 255500, 256500, 257500, 258500, + 259500, 260500, 261500, 262500, 263500, 264500, + 265500, 266500, 267500, 268500, 269500, 270500, + 271500, 272500, 273500, 274500, 275500, 276500, + 277500, 278500, 279500, 280500, 281500, 282500]" shape="(60,)" standard_name="projection_x_coordinate" units="Unit('meters')" value_type="int32" var_name="x"> + 352500, 353500, 354500, 355500, 356500, 357500, + 358500, 359500, 360500, 361500, 362500, 363500, + 364500, 365500, 366500, 367500, 368500, 369500, + 370500, 371500, 372500, 373500, 374500, 375500, + 376500, 377500, 378500, 379500, 380500, 381500, + 382500, 383500, 384500, 385500, 386500, 387500, + 388500, 389500, 390500, 391500, 392500, 393500, + 394500, 395500, 396500, 397500, 398500, 399500, + 400500, 401500, 402500, 403500, 404500, 405500]" shape="(60,)" standard_name="projection_y_coordinate" units="Unit('meters')" value_type="int32" var_name="y"> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc.cml b/lib/iris/tests/results/netcdf/netcdf_merc.cml index c06a2efe88..5143b400fe 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc.cml @@ -24,48 +24,49 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_false.cml b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml index 1e50aa6e65..a313169fa1 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc_false.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml @@ -6,16 +6,16 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml b/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml index c9ad4ca33f..341961ce35 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml @@ -6,12 +6,12 @@ - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_monotonic.cml b/lib/iris/tests/results/netcdf/netcdf_monotonic.cml index 3385ecd6fe..3692fc9c47 100644 --- a/lib/iris/tests/results/netcdf/netcdf_monotonic.cml +++ 
b/lib/iris/tests/results/netcdf/netcdf_monotonic.cml @@ -30,7 +30,7 @@ - + @@ -48,7 +48,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_polar.cml b/lib/iris/tests/results/netcdf/netcdf_polar.cml index 15c1a90da9..17f0ce1b60 100644 --- a/lib/iris/tests/results/netcdf/netcdf_polar.cml +++ b/lib/iris/tests/results/netcdf/netcdf_polar.cml @@ -24,19 +24,19 @@ - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_rotated_xy_land.cml b/lib/iris/tests/results/netcdf/netcdf_rotated_xy_land.cml index d975768a15..86f9358b60 100644 --- a/lib/iris/tests/results/netcdf/netcdf_rotated_xy_land.cml +++ b/lib/iris/tests/results/netcdf/netcdf_rotated_xy_land.cml @@ -13,57 +13,73 @@ - + - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml b/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml index 05e5fe475d..aa06d353e1 100644 --- a/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml +++ b/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml @@ -11,50 +11,52 @@ - + - + - + - + + [2923.5, 2924.5], + [2924.5, 2925.5], + [2925.5, 2926.5]]" id="2306ff47" long_name="Julian Day" points="[2922.5, 2923.5, 2924.5, 2925.5]" shape="(4,)" standard_name="time" units="Unit('days since 1950-01-01 00:00:00.0', calendar='standard')" value_type="float32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml index fbecdf97d3..b7a4699003 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml @@ -9,506 +9,531 @@ - + - + - + - + - + - + - + - + - + @@ -517,8 +542,9 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml index 54bcc8a686..eb8cb9073f 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml @@ -11,50 +11,52 @@ - + - + - + - + + [2923.5, 2924.5], + [2924.5, 2925.5], + [2925.5, 2926.5]]" id="2306ff47" long_name="Julian Day" points="[2922.5, 2923.5, 2924.5, 2925.5]" shape="(4,)" standard_name="time" units="Unit('days since 1950-01-01 00:00:00.0', calendar='standard')" value_type="float32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_stereo.cml b/lib/iris/tests/results/netcdf/netcdf_stereo.cml index fae7ff027b..2a2918fa0c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_stereo.cml +++ b/lib/iris/tests/results/netcdf/netcdf_stereo.cml @@ -24,49 +24,49 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml index 0575c684a9..747f0c4846 100644 --- a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml +++ b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml @@ -11,58 +11,49 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/netcdf/save_load_traj.cml b/lib/iris/tests/results/netcdf/save_load_traj.cml index 9b225d127f..65ed143990 100644 --- a/lib/iris/tests/results/netcdf/save_load_traj.cml +++ b/lib/iris/tests/results/netcdf/save_load_traj.cml @@ -8,24 +8,26 @@ - + - + - + - + - + - + @@ -36,6 +38,6 @@ - + diff --git a/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml index e48cf41d2a..616b338a25 
100644 --- a/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml @@ -10,6 +10,6 @@ - + diff --git a/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml index 78fec459e9..2a604d90fc 100644 --- a/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml @@ -10,6 +10,6 @@ - + diff --git a/lib/iris/tests/results/nimrod/levels_below_ground.cml b/lib/iris/tests/results/nimrod/levels_below_ground.cml index c7a5bb1713..7294f8486e 100644 --- a/lib/iris/tests/results/nimrod/levels_below_ground.cml +++ b/lib/iris/tests/results/nimrod/levels_below_ground.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/load_2flds.cml b/lib/iris/tests/results/nimrod/load_2flds.cml index 41e92dd48b..62ce20b7a9 100644 --- a/lib/iris/tests/results/nimrod/load_2flds.cml +++ b/lib/iris/tests/results/nimrod/load_2flds.cml @@ -11,20 +11,20 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/probability_fields.cml b/lib/iris/tests/results/nimrod/probability_fields.cml index 184d205132..a5b84970d4 100644 --- a/lib/iris/tests/results/nimrod/probability_fields.cml +++ b/lib/iris/tests/results/nimrod/probability_fields.cml @@ -20,20 +20,20 @@ - + - + - + @@ -65,10 +65,10 @@ - + @@ -78,12 +78,12 @@ - + - + @@ -114,19 +114,19 @@ - + - + - + @@ -161,25 +161,25 @@ - + - + - + - + @@ -213,24 +213,24 @@ - + - + - + - + @@ -281,12 +281,12 @@ - + - + @@ -321,12 +321,12 @@ - + - + @@ -361,19 +361,19 @@ - + - + - + @@ -405,19 +405,19 @@ - + - + - + @@ -451,12 +451,12 @@ - + - + @@ -487,12 +487,12 @@ - + - + @@ -527,12 +527,12 @@ - + - + @@ -563,12 +563,12 @@ - + - + @@ -603,17 +603,17 @@ - + - + - + @@ -647,17 +647,17 @@ - + - + - + @@ -685,18 +685,18 @@ - + - + - + @@ -723,7 +723,7 @@ - + @@ -732,12 +732,12 @@ - + - + @@ -763,18 +763,18 @@ - + - + - + @@ -804,24 +804,24 @@ - + - + - + + [3.75, 6.25]]" id="573b41e6" points="[0.2, 5. ]" shape="(2,)" units="Unit('mm')" value_type="float32" var_name="threshold"> @@ -850,23 +850,23 @@ - + - + - + - + @@ -898,24 +898,24 @@ - + - + - + + [3. , 5. ]]" id="cecff436" points="[0.2, 4. 
]" shape="(2,)" units="Unit('mm hr^-1')" value_type="float32" var_name="threshold"> @@ -946,18 +946,18 @@ - + - + - + @@ -989,18 +989,18 @@ - + - + - + @@ -1026,18 +1026,18 @@ - + - + - + @@ -1064,7 +1064,7 @@ - + @@ -1073,12 +1073,12 @@ - + - + @@ -1113,17 +1113,17 @@ - + - + - + @@ -1156,12 +1156,12 @@ - + - + @@ -1203,12 +1203,12 @@ - + - + @@ -1246,12 +1246,12 @@ - + - + @@ -1293,12 +1293,12 @@ - + - + @@ -1329,19 +1329,19 @@ - + - + - + @@ -1366,25 +1366,25 @@ - + - + - + - + @@ -1414,30 +1414,30 @@ - + - + - + - + - + @@ -1465,30 +1465,30 @@ - + - + - + - + - + @@ -1521,19 +1521,19 @@ - + - + - + @@ -1565,7 +1565,7 @@ - + @@ -1575,12 +1575,12 @@ - + - + @@ -1611,19 +1611,19 @@ - + - + - + @@ -1659,24 +1659,24 @@ - + - + - + - + @@ -1710,24 +1710,24 @@ - + - + - + - + @@ -1760,19 +1760,19 @@ - + - + - + @@ -1803,19 +1803,19 @@ - + - + - + @@ -1841,13 +1841,13 @@ - + - + @@ -1857,12 +1857,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml index a6ed9068ca..354f60857c 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml @@ -16,18 +16,18 @@ + [6300, 7200]]" id="b40ecfd3" points="[7200, 7200]" shape="(2,)" standard_name="forecast_period" units="Unit('second')" value_type="int32"/> - + - + @@ -36,7 +36,7 @@ + [1580193900, 1580194800]]" id="90a3bd1c" points="[1580194800, 1580194800]" shape="(2,)" standard_name="time" units="Unit('seconds since 1970-01-01 00:00:00', calendar='standard')" value_type="int64"/> diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml index cf3232d548..03d4482d76 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml @@ -21,12 +21,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml index 2aa1576fad..80af248623 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml @@ -20,29 +20,37 @@ - + - + - + @@ -76,23 +84,23 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml index 3dc62cc8e9..2a60aa92a7 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + @@ -99,12 +99,12 @@ - + - + @@ -139,12 +139,12 @@ - + - + @@ -178,24 +178,24 @@ - + - + - + @@ -229,12 +229,12 @@ - + - + @@ -259,7 +259,7 @@ - + @@ -271,12 +271,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml index 9be61d489c..4e3a95118e 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml @@ -11,7 +11,7 @@ - + @@ -27,12 +27,12 @@ - + - + @@ -66,12 +66,12 @@ - + - + @@ -105,12 +105,12 @@ - + - + @@ -144,12 +144,12 @@ - + - + @@ -188,12 +188,12 @@ - + - + @@ -234,12 +234,12 @@ - + - + @@ -273,12 +273,12 @@ - + - + @@ -312,17 +312,17 @@ - + - + - + @@ -354,17 +354,17 @@ - + - + - + diff --git 
a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml index 734beb7f47..1811f3a613 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml @@ -11,7 +11,7 @@ - + @@ -27,12 +27,12 @@ - + - + @@ -57,7 +57,7 @@ - + @@ -73,12 +73,12 @@ - + - + @@ -103,7 +103,7 @@ - + @@ -119,12 +119,12 @@ - + - + @@ -149,7 +149,7 @@ - + @@ -165,12 +165,12 @@ - + - + @@ -195,7 +195,7 @@ - + @@ -211,12 +211,12 @@ - + - + @@ -250,19 +250,19 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml index 56bfecc1b4..f25eca3dc1 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + @@ -98,12 +98,12 @@ - + - + @@ -138,12 +138,12 @@ - + - + @@ -178,12 +178,12 @@ - + - + @@ -217,12 +217,12 @@ - + - + @@ -257,12 +257,12 @@ - + - + @@ -297,12 +297,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml index 2eb83d787b..6f86ca4825 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml @@ -20,12 +20,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml index 4f4c986a39..9de3064f95 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -60,12 +60,12 @@ - + - + @@ -100,12 +100,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml index dd6102ea7f..4b02455688 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml @@ -16,18 +16,18 @@ + [6300, 7200]]" id="b40ecfd3" points="[7200, 7200]" shape="(2,)" standard_name="forecast_period" units="Unit('second')" value_type="int32"/> - + - + @@ -36,7 +36,7 @@ + [1580186700, 1580187600]]" id="90a3bd1c" points="[1580187600, 1580187600]" shape="(2,)" standard_name="time" units="Unit('seconds since 1970-01-01 00:00:00', calendar='standard')" value_type="int64"/> diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml index be1e89a53d..9797f7e1c1 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -60,12 +60,12 @@ - + - + @@ -100,12 +100,12 @@ - + - + @@ -139,12 +139,12 @@ - + - + @@ -179,12 +179,12 @@ - + - + @@ -219,12 +219,12 @@ - + - + @@ -258,12 +258,12 @@ - + - + @@ -298,12 +298,12 @@ - + - + @@ -338,12 +338,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml index 9a3ff88df8..8dbf6ea00b 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + diff --git 
a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml index 00bc65f236..53af61c246 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + @@ -98,12 +98,12 @@ - + - + @@ -137,12 +137,12 @@ - + - + @@ -176,12 +176,12 @@ - + - + @@ -215,12 +215,12 @@ - + - + @@ -254,12 +254,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml index b2cf624214..eea66a698a 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + @@ -98,12 +98,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml index aaed20394f..7ff6f98296 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml @@ -11,9 +11,9 @@ - + @@ -29,12 +29,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml index 3a25dc86fc..4cfec2d195 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml @@ -20,29 +20,37 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml index fa4ab30a58..6c58db4f24 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml @@ -27,12 +27,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml index 918a0c7ae5..4110459787 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -60,12 +60,12 @@ - + - + @@ -99,12 +99,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml index 3a6c3bf53c..a58304ccca 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml @@ -11,7 +11,7 @@ - + @@ -27,12 +27,12 @@ - + - + @@ -57,7 +57,7 @@ - + @@ -73,12 +73,12 @@ - + - + @@ -103,7 +103,7 @@ - + @@ -119,12 +119,12 @@ - + - + @@ -149,7 +149,7 @@ - + @@ -165,12 +165,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml index eab889a8af..bd4c3c1a10 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + @@ -72,9 +72,9 @@ - + @@ -103,12 +103,12 @@ - + - + @@ -116,9 +116,9 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml index 6ff6359046..20cbda9ebe 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml +++ 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml @@ -28,12 +28,12 @@ - + - + @@ -75,12 +75,12 @@ - + - + @@ -121,12 +121,12 @@ - + - + @@ -167,12 +167,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml index 037cb5c2b6..73f4b6bf00 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml @@ -27,12 +27,12 @@ - + - + @@ -73,12 +73,12 @@ - + - + @@ -119,12 +119,12 @@ - + - + @@ -165,12 +165,12 @@ - + - + @@ -211,12 +211,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml index 5ca9920172..c2525727c3 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml @@ -21,19 +21,19 @@ - + - + - + @@ -67,19 +67,19 @@ - + - + - + @@ -113,19 +113,19 @@ - + - + - + @@ -159,19 +159,19 @@ - + - + - + @@ -205,19 +205,19 @@ - + - + - + @@ -252,19 +252,19 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml index 91c40ea6d0..4fb8590608 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml @@ -20,19 +20,19 @@ - + - + - + @@ -66,19 +66,19 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml index 3252dbf047..eb45fd6f84 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml @@ -20,19 +20,19 @@ - + - + - + @@ -66,19 +66,19 @@ - + - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml index d39fa0e367..8a3c428f7f 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml @@ -20,12 +20,12 @@ - + - + @@ -59,12 +59,12 @@ - + - + @@ -101,12 +101,12 @@ - + - + @@ -140,12 +140,12 @@ - + - + @@ -179,12 +179,12 @@ - + - + @@ -218,12 +218,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml index 4a5783ecb3..d4e53c93f7 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml @@ -21,12 +21,12 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml index d2c7e72848..706083f09d 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml @@ -27,12 +27,12 @@ - + - + @@ -40,8 +40,8 @@ - + diff --git a/lib/iris/tests/results/pandas/as_cube/data_frame_datetime_standard.cml b/lib/iris/tests/results/pandas/as_cube/data_frame_datetime_standard.cml index 84280753c7..762b535ee4 100644 --- a/lib/iris/tests/results/pandas/as_cube/data_frame_datetime_standard.cml +++ b/lib/iris/tests/results/pandas/as_cube/data_frame_datetime_standard.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/pandas/as_cube/data_frame_multidim.cml 
b/lib/iris/tests/results/pandas/as_cube/data_frame_multidim.cml index d377fa72d8..56b78b9a78 100644 --- a/lib/iris/tests/results/pandas/as_cube/data_frame_multidim.cml +++ b/lib/iris/tests/results/pandas/as_cube/data_frame_multidim.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/pandas/as_cube/data_frame_netcdftime_360.cml b/lib/iris/tests/results/pandas/as_cube/data_frame_netcdftime_360.cml index 39bc96f8e3..8d0981264b 100644 --- a/lib/iris/tests/results/pandas/as_cube/data_frame_netcdftime_360.cml +++ b/lib/iris/tests/results/pandas/as_cube/data_frame_netcdftime_360.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml b/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml index 5cb621d5f3..f8ecf3bccf 100644 --- a/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml +++ b/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml @@ -3,8 +3,9 @@ - + diff --git a/lib/iris/tests/results/pandas/as_cube/series_netcdfimte_360.cml b/lib/iris/tests/results/pandas/as_cube/series_netcdfimte_360.cml index 0d9c4e149e..cef7b642d8 100644 --- a/lib/iris/tests/results/pandas/as_cube/series_netcdfimte_360.cml +++ b/lib/iris/tests/results/pandas/as_cube/series_netcdfimte_360.cml @@ -3,8 +3,9 @@ - + diff --git a/lib/iris/tests/results/pp_load_rules/global.cml b/lib/iris/tests/results/pp_load_rules/global.cml index a69e633e26..a013add0cb 100644 --- a/lib/iris/tests/results/pp_load_rules/global.cml +++ b/lib/iris/tests/results/pp_load_rules/global.cml @@ -7,37 +7,63 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml b/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml index ecf51190c7..49fafdf8ca 100644 --- a/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml +++ b/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml @@ -8,10 +8,10 @@ - + - + @@ -21,17 +21,19 @@ - + - + - + @@ -45,10 +47,10 @@ - + - + @@ -58,17 +60,19 @@ - + - + - + @@ -86,10 +90,10 @@ - + - + @@ -99,17 +103,19 @@ - + - + - + @@ -127,10 +133,10 @@ - + - + @@ -140,17 +146,19 @@ - + - + - + @@ -168,29 +176,33 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/pp_load_rules/lbtim_2.cml b/lib/iris/tests/results/pp_load_rules/lbtim_2.cml index 8ce7dd472c..5d43911642 100644 --- a/lib/iris/tests/results/pp_load_rules/lbtim_2.cml +++ b/lib/iris/tests/results/pp_load_rules/lbtim_2.cml @@ -7,32 +7,46 @@ - + - + - + - + diff --git a/lib/iris/tests/results/pp_load_rules/ocean_depth.cml b/lib/iris/tests/results/pp_load_rules/ocean_depth.cml index 9c33acdac4..ccd4557dce 100644 --- a/lib/iris/tests/results/pp_load_rules/ocean_depth.cml +++ b/lib/iris/tests/results/pp_load_rules/ocean_depth.cml @@ -7,35 +7,50 @@ - + - + - + - + - + @@ -47,7 +62,7 @@ - + diff --git a/lib/iris/tests/results/pp_load_rules/ocean_depth_bounded.cml b/lib/iris/tests/results/pp_load_rules/ocean_depth_bounded.cml index 5a959ad027..cb785002fd 100644 --- a/lib/iris/tests/results/pp_load_rules/ocean_depth_bounded.cml +++ b/lib/iris/tests/results/pp_load_rules/ocean_depth_bounded.cml @@ -7,39 +7,54 @@ - + - + - + - + - + @@ -51,7 +66,7 @@ - + diff --git a/lib/iris/tests/results/pp_load_rules/rotated_uk.cml b/lib/iris/tests/results/pp_load_rules/rotated_uk.cml index ece399df4e..ae0f6c3a03 100644 --- a/lib/iris/tests/results/pp_load_rules/rotated_uk.cml +++ b/lib/iris/tests/results/pp_load_rules/rotated_uk.cml @@ -8,34 +8,35 @@ - + - + - + - + - + - + - + diff --git 
a/lib/iris/tests/results/stock/realistic_4d.cml b/lib/iris/tests/results/stock/realistic_4d.cml index 6640c54360..ea7b7c6f5a 100644 --- a/lib/iris/tests/results/stock/realistic_4d.cml +++ b/lib/iris/tests/results/stock/realistic_4d.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/system/supported_filetype_.grib2.cml b/lib/iris/tests/results/system/supported_filetype_.grib2.cml deleted file mode 100644 index 5376af2fe1..0000000000 --- a/lib/iris/tests/results/system/supported_filetype_.grib2.cml +++ /dev/null @@ -1,48 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/system/supported_filetype_.nc.cml b/lib/iris/tests/results/system/supported_filetype_.nc.cml index 6ad0a3b176..4fe3faea62 100644 --- a/lib/iris/tests/results/system/supported_filetype_.nc.cml +++ b/lib/iris/tests/results/system/supported_filetype_.nc.cml @@ -9,26 +9,22 @@ - + - + diff --git a/lib/iris/tests/results/system/supported_filetype_.pp.cml b/lib/iris/tests/results/system/supported_filetype_.pp.cml index e457b2921e..bfb54e9d7b 100644 --- a/lib/iris/tests/results/system/supported_filetype_.pp.cml +++ b/lib/iris/tests/results/system/supported_filetype_.pp.cml @@ -3,40 +3,36 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/trajectory/constant_latitude.cml b/lib/iris/tests/results/trajectory/constant_latitude.cml index 38c208b825..28ab781a9c 100644 --- a/lib/iris/tests/results/trajectory/constant_latitude.cml +++ b/lib/iris/tests/results/trajectory/constant_latitude.cml @@ -8,89 +8,103 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/trajectory/hybrid_height.cml b/lib/iris/tests/results/trajectory/hybrid_height.cml index 28e821b900..4a443704aa 100644 --- a/lib/iris/tests/results/trajectory/hybrid_height.cml +++ b/lib/iris/tests/results/trajectory/hybrid_height.cml @@ -4,28 +4,28 @@ + [[5343, 5396, 5449, 5502, 5555, 5608], + [5661, 5714, 5767, 5820, 5873, 5926], + [5979, 6032, 6085, 6138, 6191, 6244], + [6297, 6350, 6403, 6456, 6509, 6562], + [6615, 6668, 6721, 6774, 6827, 6880]]]" shape="(4, 5, 6)" standard_name="altitude" units="Unit('m')" value_type="int64"> @@ -48,10 +48,10 @@ + [106, 107, 108, 109, 110, 111], + [112, 113, 114, 115, 116, 117], + [118, 119, 120, 121, 122, 123], + [124, 125, 126, 127, 128, 129]]" shape="(5, 6)" units="Unit('m')" value_type="int64"/> @@ -64,9 +64,9 @@ + [5192, 5855, 6212, 6569], + [5294, 5970, 6334, 6698], + [5396, 6085, 6456, 6827]]" shape="(4, 4)" standard_name="altitude" units="Unit('m')" value_type="int64"> diff --git a/lib/iris/tests/results/trajectory/single_point.cml b/lib/iris/tests/results/trajectory/single_point.cml index 64c71e0394..37d9e98ba7 100644 --- a/lib/iris/tests/results/trajectory/single_point.cml +++ b/lib/iris/tests/results/trajectory/single_point.cml @@ -8,11 +8,12 @@ - + - + @@ -25,70 +26,83 @@ - + - + - + - + diff --git a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml index 7b5bbfc086..fef87e7945 100644 --- a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml +++ b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml @@ -30,43 
+30,47 @@ - + @@ -76,25 +80,35 @@ - + @@ -103,26 +117,35 @@ - + @@ -131,7 +154,7 @@ - + diff --git a/lib/iris/tests/results/trajectory/zigzag.cml b/lib/iris/tests/results/trajectory/zigzag.cml index 8a578c4ab4..71fd9ff0e5 100644 --- a/lib/iris/tests/results/trajectory/zigzag.cml +++ b/lib/iris/tests/results/trajectory/zigzag.cml @@ -8,37 +8,35 @@ - + - + - + - + - + @@ -52,10 +50,10 @@ - + - + diff --git a/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml b/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml index 2592307cda..b89de9d558 100644 --- a/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml +++ b/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml @@ -6,51 +6,54 @@ - + - + - + - + @@ -62,24 +65,25 @@ - + + -2587546.39953, -862515.46651, 862515.46651, + 2587546.39953, 4312577.33255, 6037608.26557, + 7762639.19859]" shape="(10,)" standard_name="projection_y_coordinate" units="Unit('m')" value_type="float64"/> - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml index 1e74c9bc9c..9c4d58edc1 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml index 1e74c9bc9c..9c4d58edc1 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml index 1e74c9bc9c..9c4d58edc1 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" 
standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml index 1e74c9bc9c..9c4d58edc1 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml index 1e74c9bc9c..9c4d58edc1 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml index 1e74c9bc9c..9c4d58edc1 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml @@ -6,499 +6,522 @@ - + - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml index e318abad67..e45b9bfff7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml @@ -6,44 +6,54 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -54,56 +64,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 
1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml index e318abad67..e45b9bfff7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml @@ -6,44 +6,54 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -54,56 +64,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + 
[1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml index e318abad67..e45b9bfff7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml @@ -6,44 +6,54 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -54,56 +64,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" 
shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml index e318abad67..e45b9bfff7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml @@ -6,44 +6,54 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -54,56 +64,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 
1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml index 82c79a7577..3d2e341c11 100644 --- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml @@ -6,41 +6,51 @@ - + - + + [1204, 1205, 1206, 1207], + [1208, 1209, 1210, 1211], + ..., + [1288, 1289, 1290, 1291], + [1292, 1293, 1294, 1295], + [1296, 1297, 1298, 1299]]" id="c83ed0ee" points="[3200, 3201, 3202, ..., 3297, 3298, 3299]" shape="(100,)" standard_name="latitude" units="Unit('unknown')" value_type="int64"/> - + @@ -48,56 +58,59 @@ + [1104, 1105, 1106, 1107], + [1108, 1109, 1110, 1111], + ..., + [1188, 1189, 1190, 1191], + [1192, 1193, 1194, 1195], + [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml index 8467544d44..5c169b2319 100644 --- 
a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git 
a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml index 86d7855b1b..206244f3c7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml index 86d7855b1b..206244f3c7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml index 86d7855b1b..206244f3c7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml index 86d7855b1b..206244f3c7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml index 86d7855b1b..206244f3c7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + 
[-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml index 86d7855b1b..206244f3c7 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml index 73d6073a4b..86f0bf7b52 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml index 73d6073a4b..86f0bf7b52 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml index 73d6073a4b..86f0bf7b52 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml 
b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml index 73d6073a4b..86f0bf7b52 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml index 73d6073a4b..86f0bf7b52 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml index 73d6073a4b..86f0bf7b52 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" 
points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml index 8467544d44..5c169b2319 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml @@ -6,111 +6,125 @@ - + + [-0.12735, -0.12645], + [-0.12645, -0.12555], + ..., + [-0.04095, -0.04005], + [-0.04005, -0.03915], + [-0.03915, -0.03825]]" id="f1ab3066" points="[-0.1278, -0.1269, -0.126 , ..., -0.0405, -0.0396, + -0.0387]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float32"> - + - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata.cml b/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata.cml index 4ae37abf0b..f1f37e23b9 100644 --- a/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata.cml +++ 
b/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata.cml @@ -3,62 +3,62 @@ - + [[1100., 1106., 1112., ..., 1208., 1214., + 1220.], + [3260., 3266., 3272., ..., 3368., 3374., + 3380.], + [5420., 5426., 5432., ..., 5528., 5534., + 5540.]]]" shape="(4, 3, 21)" standard_name="altitude" units="Unit('m')" value_type="float64"> - + - + - + - + + 1780, 1790, 1800, 1810, 1820, 1830, 1840, 1850, + 1860, 1870, 1880, 1890, 1900], + [5300, 5310, 5320, 5330, 5340, 5350, 5360, 5370, + 5380, 5390, 5400, 5410, 5420, 5430, 5440, 5450, + 5460, 5470, 5480, 5490, 5500], + [8900, 8910, 8920, 8930, 8940, 8950, 8960, 8970, + 8980, 8990, 9000, 9010, 9020, 9030, 9040, 9050, + 9060, 9070, 9080, 9090, 9100]]" shape="(3, 21)" standard_name="surface_altitude" units="Unit('m')" value_type="int64"/> diff --git a/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata_wrapped.cml b/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata_wrapped.cml index 17eebb6ea4..48f0fa1aaa 100644 --- a/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata_wrapped.cml +++ b/lib/iris/tests/results/unit/cube/Cube/intersection__Metadata/metadata_wrapped.cml @@ -3,61 +3,65 @@ - + [[ 2180., 2186., 2192., ..., 128., 134., + 140.], + [ 4340., 4346., 4352., ..., 2288., 2294., + 2300.], + [ 6500., 6506., 6512., ..., 4448., 4454., + 4460.]]]" shape="(4, 3, 21)" standard_name="altitude" units="Unit('m')" value_type="float64"> - + - + - + - + - + diff --git a/lib/iris/tests/results/unit/cube/Cube/xml/ancils.cml b/lib/iris/tests/results/unit/cube/Cube/xml/ancils.cml index d2b55524f8..0db98eae1a 100644 --- a/lib/iris/tests/results/unit/cube/Cube/xml/ancils.cml +++ b/lib/iris/tests/results/unit/cube/Cube/xml/ancils.cml @@ -3,48 +3,48 @@ - + [[10, 20], + [20, 25], + [25, 40], + [40, 60]]]" id="434cbbd8" long_name="bar" points="[[ 2.5, 7.5, 12.5, 17.5], + [10. , 17.5, 27.5, 42.5], + [15. , 22.5, 32.5, 50. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> - + [[ -5, 10], + [ 10, 18], + [ 18, 55], + [ 18, 70]]]" id="b0d35dcf" long_name="foo" points="[[ -7.5, 7.5, 22.5, 37.5], + [-12.5, 4. , 26.5, 47.5], + [ 2.5, 14. , 36.5, 44. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/unit/cube/Cube/xml/cell_measures.cml b/lib/iris/tests/results/unit/cube/Cube/xml/cell_measures.cml index 9003ecbbe0..02cf060918 100644 --- a/lib/iris/tests/results/unit/cube/Cube/xml/cell_measures.cml +++ b/lib/iris/tests/results/unit/cube/Cube/xml/cell_measures.cml @@ -3,60 +3,60 @@ - + [[10, 20], + [20, 25], + [25, 40], + [40, 60]]]" id="434cbbd8" long_name="bar" points="[[ 2.5, 7.5, 12.5, 17.5], + [10. , 17.5, 27.5, 42.5], + [15. , 22.5, 32.5, 50. ]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> - + [[ -5, 10], + [ 10, 18], + [ 18, 55], + [ 18, 70]]]" id="b0d35dcf" long_name="foo" points="[[ -7.5, 7.5, 22.5, 37.5], + [-12.5, 4. , 26.5, 47.5], + [ 2.5, 14. , 36.5, 44. 
]]" shape="(3, 4)" units="Unit('1')" value_type="float64"/> - + - + [[0., 0., 0., 0.], + [0., 0., 0., 0.], + [0., 0., 0., 0.]]]" id="9ff02736" long_name="madeup" measure="volume" shape="(2, 3, 4)" units="Unit('m3')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_realization.cml b/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_realization.cml index 0a8b19cc2b..e010a668c5 100644 --- a/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_realization.cml +++ b/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_realization.cml @@ -4,19 +4,19 @@ + 0]" shape="(17,)" standard_name="forecast_period" units="Unit('1')" value_type="int64"/> + 10, 10, 11, 11, 10]" shape="(17,)" standard_name="forecast_reference_time" units="Unit('1')" value_type="int64"/> + 3]" shape="(17,)" standard_name="realization" units="Unit('1')" value_type="int64"/> + 2]" shape="(17,)" standard_name="time" units="Unit('1')" value_type="int64"/> diff --git a/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_triple.cml b/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_triple.cml index edd4e8c9f7..4c44da7d4c 100644 --- a/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_triple.cml +++ b/lib/iris/tests/results/unit/cube/CubeList/merge__time_triple/combination_with_extra_triple.cml @@ -4,19 +4,19 @@ + 1]" shape="(17,)" standard_name="forecast_period" units="Unit('1')" value_type="int64"/> + 10, 10, 11, 11, 11]" shape="(17,)" standard_name="forecast_reference_time" units="Unit('1')" value_type="int64"/> + 2]" shape="(17,)" standard_name="realization" units="Unit('1')" value_type="int64"/> + 3]" shape="(17,)" standard_name="time" units="Unit('1')" value_type="int64"/> diff --git a/lib/iris/tests/results/unit/experimental/stratify/relevel/Test/multi_dim_target_levels.cml b/lib/iris/tests/results/unit/experimental/stratify/relevel/Test/multi_dim_target_levels.cml index 132ac9887e..b95f1cfd5e 100644 --- a/lib/iris/tests/results/unit/experimental/stratify/relevel/Test/multi_dim_target_levels.cml +++ b/lib/iris/tests/results/unit/experimental/stratify/relevel/Test/multi_dim_target_levels.cml @@ -9,9 +9,9 @@ - + [[12]]]" shape="(2, 1, 1)" units="Unit('1')" value_type="int32"/> diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/endian.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/endian.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/endian.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/endian.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl rename to 
lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator_no_ellipsoid.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_no_ellipsoid.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_no_ellipsoid.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_no_ellipsoid.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_scale_factor.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_scale_factor.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator_no_ellipsoid.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator_no_ellipsoid.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator_no_ellipsoid.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/with_climatology.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/with_climatology.cdl
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl
similarity index 100%
rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl
rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl
diff --git a/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml b/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml
index 52aae1eb5e..47814067a4 100644
--- a/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml
+++ b/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml
@@ -3,18 +3,18 @@ - + - +
diff --git
a/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml index abaebd51d6..f0b86b2488 100644 --- a/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml @@ -3,118 +3,154 @@ - + [[ 61.25 , 50.49913401, 75. , + 75. ], + [ 50.49913401, 37.10216606, 75. , + 75. ], + [ 37.10216606, 31.25642246, 75. , + 75. ], + [ 31.25642246, 36.40577724, 75. , + 75. ], + [ 36.40577724, 49.53429605, 75. , + 75. ], + [ 49.53429605, 61.25 , 75. , + 75. ]]]" id="4a0cb9d8" points="[[-70.79611457, -74.52039586, -79.04767474, + -79.25954365, -74.83895678, -70.95990428], + [-34.98996349, -46.35178848, -59.72147857, + -60.34014532, -47.30539587, -35.49921565], + [ 1.97619064, -10.62625029, -22.85932423, + -23.34910237, -11.59548272, 1.36966775], + [ 38.91370839, 25.53103737, 14.31169927, + 13.91632407, 24.58541465, 38.21533886], + [ 74.19713964, 60.25847406, 51.32460204, + 51.01627348, 59.44583014, 73.26834378]]" shape="(5, 6)" standard_name="latitude" units="Unit('degrees')" value_type="float64"/> - + [[ -60. , 21.98365957, 120. , + 120. ], + [ 21.98365957, 72.58386797, 120. , + 120. ], + [ 72.58386797, 118.48335809, 120. , + 120. ], + [ 118.48335809, 164.29858322, 120. , + 120. ], + [ 164.29858322, -145.72980597, 120. , + 120. ], + [-145.72980597, -60. , 120. , + 120. ]]]" id="62e940e0" points="[[ -50.71754472, -40.98262669, -46.74020973, + -71.93761444, -79.29296667, -70.14565996], + [ -29.86690506, 17.60638978, 77.93623025, + 157.14479498, -141.03702223, -93.17170324], + [ -23.13943014, 31.00689379, 87.69861699, + 148.32166238, -154.63858771, -100.50500328], + [ -16.0543062 , 41.21800297, 92.76054229, + 130.95135597, -164.73841441, -108.10521629], + [ 10.85977909, 61.77988295, 100.23612308, + 137.28481933, 175.51112189, -135.44594391]]" shape="(5, 6)" standard_name="longitude" units="Unit('degrees')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml index bf8902bcb2..028979b7a4 100644 --- a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml @@ -3,17 +3,17 @@ - + diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml index e1760775f9..51fd3991d3 100644 --- a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml @@ -3,15 +3,15 @@ - + - + diff --git a/lib/iris/tests/results/uri_callback/pp_global.cml b/lib/iris/tests/results/uri_callback/pp_global.cml index 4e493f486d..6bfdf4d610 100644 --- a/lib/iris/tests/results/uri_callback/pp_global.cml +++ b/lib/iris/tests/results/uri_callback/pp_global.cml @@ -9,36 +9,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml index e7c799f397..162fa47f88 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml +++ 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml @@ -8,10 +8,10 @@ - + - + @@ -21,26 +21,28 @@ - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml index 66cbc7206b..4624ab6701 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml @@ -8,10 +8,10 @@ - + - + @@ -21,26 +21,28 @@ - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml index af298945f0..8e345390fa 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml @@ -9,10 +9,10 @@ - + - + @@ -22,26 +22,28 @@ - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml index 3a55c44f2f..8648f88f1e 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml @@ -8,36 +8,51 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml index 1ab309af4e..c935249403 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml @@ -6,25 +6,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml index 0bf359e9c4..0b8d1b386a 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml @@ -9,37 +9,41 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml index e5cec55565..eaacfdb569 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml +++ 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml @@ -8,20 +8,23 @@ - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml index 5a7a6441a4..6fb7501788 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml @@ -9,129 +9,140 @@ - + - + - + - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml index cf7b207be9..6977467e33 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml @@ -11,21 +11,21 @@ - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml index 51ab62f9aa..6294dcef2a 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml @@ -8,34 +8,35 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml index 55a60a7cd6..235e2a2177 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml @@ -8,39 +8,41 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml index 2736fe9aa6..cbbce59ec3 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml @@ -8,26 +8,25 @@ - + - + @@ -37,42 +36,43 @@ - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml index 8c4ee7df19..21d8da958b 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml @@ -7,34 +7,34 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml index 83f7502ba5..dcb211e11b 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml @@ -8,12 +8,12 @@ - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_tseries.b_0.cml 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_tseries.b_0.cml index fb6fa8a599..c3a675186b 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_0.cml index f2c30b37ef..148fa0b3d6 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_1.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_1.cml index cc5f574799..fb0c81a1d6 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_2.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_2.cml index 9fe3e1cb1c..a259deb0ba 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abxpa_press_lat.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abxpa_press_lat.b_0.cml index 71c005b916..4e97c06748 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/integer.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/integer.b_0.cml index 642dadc721..1dd5510481 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/model.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/model.b_0.cml index f0bb9dc293..ffb7b84b78 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/ocean_xsect.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/ocean_xsect.b_0.cml index 5549d7cebe..9a8021dc15 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml index b484ebb305..a9a512398f 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml index c594c748cd..899d997f26 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st30211.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st30211.b_0.cml index ffcf430c02..1a60704482 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml index 44999e85b7..e247d6a821 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml index 990fa0d7fe..1f9d905a3d 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml index 43789498c1..6e7907acd9 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.16.202.000128.1860.09.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.16.202.000128.1860.09.01.00.00.b.cml index 54e3824d6d..aed9a4e679 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/001000000000.00.000.000000.1860.01.01.00.00.f.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/001000000000.00.000.000000.1860.01.01.00.00.f.b.cml index 7fef4515a2..84bcd515cf 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/002000000000.44.101.131200.1920.09.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/002000000000.44.101.131200.1920.09.01.00.00.b.cml index f41f4a284a..31a571d6f1 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/008000000000.44.101.000128.1890.09.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/008000000000.44.101.000128.1890.09.01.00.00.b.cml index e8d2cb7735..ffc9dac60f 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/12187.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/12187.b.cml index 7e35faceda..2d61cf4fee 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/HadCM2_ts_SAT_ann_18602100.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/HadCM2_ts_SAT_ann_18602100.b.cml index 69f597d697..8b68a7edb7 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_level_lat_orig.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_level_lat_orig.b.cml index d24575ca39..720e956c16 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_lon_lat_press_orig.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_lon_lat_press_orig.b.cml index 358208d7b3..9c94da141e 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_lon_lat_several.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_lon_lat_several.b.cml index 93f156d33d..f95bfa4279 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_n10r13xy.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_n10r13xy.b.cml index 33ec73acee..9d4be31b02 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_time_press.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_time_press.b.cml index ad54273ae2..be319dd37e 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_tseries.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/aaxzc_tseries.b.cml index 6f3a5af440..e861691897 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/abcza_pa19591997_daily_29.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/abcza_pa19591997_daily_29.b.cml index f3a4d8dd0d..61f1875148 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/abxpa_press_lat.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/abxpa_press_lat.b.cml index 0fbbd81c74..ca81b9cba0 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/integer.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/integer.b.cml index 9cad8ea176..6a9fc5a005 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/model.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/model.b.cml index 2f8db655cc..08329b3f56 100644
diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/ocean_xsect.b.cml
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/ocean_xsect.b.cml index cd4e742da9..8196f78d26 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/ocean_xsect.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/ocean_xsect.b.cml @@ -7,48 +7,51 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc699.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc699.b.cml index 0a9e0a7bcb..f93e7b7205 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc699.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc699.b.cml @@ -7,24 +7,25 @@ - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc942.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc942.b.cml index fd775483e9..f0899f570e 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc942.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st0fc942.b.cml @@ -7,37 +7,41 @@ - + - + - + - + - + - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st30211.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st30211.b.cml index 2a49cf6c71..1474b7c502 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st30211.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/st30211.b.cml @@ -7,16 +7,17 @@ - + - + - + @@ -26,13 +27,13 @@ - + - + diff --git a/lib/iris/tests/runner/__main__.py b/lib/iris/tests/runner/__main__.py deleted file mode 100644 index 9f9c51c1f7..0000000000 --- a/lib/iris/tests/runner/__main__.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Provides testing capabilities for installed copies of Iris. - -""" - -import argparse - -from ._runner import TestRunner - -parser = argparse.ArgumentParser( - "iris.tests", description=TestRunner.description -) -for long_opt, short_opt, help_text in TestRunner.user_options: - long_opt = long_opt.strip("=") - if long_opt in TestRunner.boolean_options: - parser.add_argument( - "--" + long_opt, - "-" + short_opt, - action="store_true", - help=help_text, - ) - else: - parser.add_argument("--" + long_opt, "-" + short_opt, help=help_text) -args = parser.parse_args() - -runner = TestRunner() - -runner.initialize_options() -for long_opt, short_opt, help_text in TestRunner.user_options: - arg = long_opt.replace("-", "_").strip("=") - setattr(runner, arg, getattr(args, arg)) -runner.finalize_options() - -runner.run() diff --git a/lib/iris/tests/runner/_runner.py b/lib/iris/tests/runner/_runner.py deleted file mode 100644 index bfb2cc2402..0000000000 --- a/lib/iris/tests/runner/_runner.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Provides testing capabilities for installed copies of Iris. - -""" - -# Because this file is imported by setup.py, there may be additional runtime -# imports later in the file. -import os -import sys - - -# NOTE: Do not inherit from object as distutils does not like it. 
-class TestRunner: - """Run the Iris tests under pytest and pytest-xdist for performance""" - - description = ( - "Run tests under pytest and pytest-xdist for performance. " - "Default behaviour is to run all non-gallery tests. " - "Specifying one or more test flags will run *only* those " - "tests." - ) - user_options = [ - ( - "no-data", - "n", - "Override the paths to the data repositories so it " - "appears to the tests that it does not exist.", - ), - ("stop", "x", "Stop running tests after the first error or failure."), - ("system-tests", "s", "Run the limited subset of system tests."), - ("gallery-tests", "e", "Run the gallery code tests."), - ("default-tests", "d", "Run the default tests."), - ( - "coding-tests", - "c", - "Run the coding standards tests. (These are a " - "subset of the default tests.)", - ), - ( - "num-processors=", - "p", - "The number of processors used for running " "the tests.", - ), - ("create-missing", "m", "Create missing test result files."), - ] - boolean_options = [ - "no-data", - "system-tests", - "stop", - "gallery-tests", - "default-tests", - "coding-tests", - "create-missing", - ] - - def initialize_options(self): - self.no_data = False - self.stop = False - self.system_tests = False - self.gallery_tests = False - self.default_tests = False - self.coding_tests = False - self.num_processors = None - self.create_missing = False - - def finalize_options(self): - # These environment variables will be propagated to all the - # processes that pytest-xdist creates. - if self.no_data: - print("Running tests in no-data mode...") - import iris.config - - iris.config.TEST_DATA_DIR = None - if self.create_missing: - os.environ["IRIS_TEST_CREATE_MISSING"] = "true" - - tests = [] - if self.system_tests: - tests.append("system") - if self.default_tests: - tests.append("default") - if self.coding_tests: - tests.append("coding") - if self.gallery_tests: - tests.append("gallery") - if not tests: - tests.append("default") - print("Running test suite(s): {}".format(", ".join(tests))) - if self.stop: - print("Stopping tests after the first error or failure") - if self.num_processors is None: - self.num_processors = "auto" - else: - self.num_processors = int(self.num_processors) - - def run(self): - import pytest - - if hasattr(self, "distribution") and self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - tests = [] - if self.system_tests: - tests.append("lib/iris/tests/system_test.py") - if self.default_tests: - tests.append("lib/iris/tests") - if self.coding_tests: - tests.append("lib/iris/tests/test_coding_standards.py") - if self.gallery_tests: - import iris.config - - default_doc_path = os.path.join(sys.path[0], "docs") - doc_path = iris.config.get_option( - "Resources", "doc_dir", default=default_doc_path - ) - gallery_path = os.path.join(doc_path, "gallery_tests") - if os.path.exists(gallery_path): - tests.append(gallery_path) - else: - print( - "WARNING: Gallery path %s does not exist." % (gallery_path) - ) - if not tests: - tests.append("lib/iris/tests") - - args = [ - None, - f"-n={self.num_processors}", - ] - - if self.stop: - args.append("-x") - - result = True - for test in tests: - args[0] = test - print() - print( - f"Running test discovery on {test} with {self.num_processors} processors." 
- ) - retcode = pytest.main(args=args) - result &= retcode.value == 0 - if result is False: - exit(1) diff --git a/lib/iris/tests/stock/_stock_2d_latlons.py b/lib/iris/tests/stock/_stock_2d_latlons.py index ff96ecc35e..4733a15305 100644 --- a/lib/iris/tests/stock/_stock_2d_latlons.py +++ b/lib/iris/tests/stock/_stock_2d_latlons.py @@ -118,7 +118,7 @@ def sample_2d_latlons(regional=False, rotated=False, transformed=False): """ Construct small 2d cubes with 2d X and Y coordinates. - This makes cubes with 'expanded' coordinates (4 bounds per cell), analagous + This makes cubes with 'expanded' coordinates (4 bounds per cell), analogous to ORCA data. The coordinates are always geographical, so either it has a coord system or they are "true" lats + lons. @@ -296,7 +296,9 @@ def sample_cube(xargs, yargs): return cube -def make_bounds_discontiguous_at_point(cube, at_iy, at_ix, in_y=False): +def make_bounds_discontiguous_at_point( + cube, at_iy, at_ix, in_y=False, upper=True +): """ Meddle with the XY grid bounds of a 2D cube to make the grid discontiguous. @@ -325,16 +327,22 @@ def adjust_coord(coord): if not in_y: # Make a discontinuity "at" (iy, ix), by moving the right-hand edge # of the cell to the midpoint of the existing left+right bounds. - new_bds_br = 0.5 * (bds_bl + bds_br) - new_bds_tr = 0.5 * (bds_tl + bds_tr) - bds_br, bds_tr = new_bds_br, new_bds_tr + new_bds_b = 0.5 * (bds_bl + bds_br) + new_bds_t = 0.5 * (bds_tl + bds_tr) + if upper: + bds_br, bds_tr = new_bds_b, new_bds_t + else: + bds_bl, bds_tl = new_bds_b, new_bds_t else: # Same but in the 'grid y direction' : # Make a discontinuity "at" (iy, ix), by moving the **top** edge of # the cell to the midpoint of the existing **top+bottom** bounds. - new_bds_tl = 0.5 * (bds_bl + bds_tl) - new_bds_tr = 0.5 * (bds_br + bds_tr) - bds_tl, bds_tr = new_bds_tl, new_bds_tr + new_bds_l = 0.5 * (bds_bl + bds_tl) + new_bds_r = 0.5 * (bds_br + bds_tr) + if upper: + bds_tl, bds_tr = new_bds_l, new_bds_r + else: + bds_bl, bds_br = new_bds_l, new_bds_r # Write in the new bounds (all 4 corners). bds[at_iy, at_ix] = [bds_bl, bds_br, bds_tr, bds_tl] @@ -355,7 +363,16 @@ def adjust_coord(coord): msg = "The coordinate {!r} doesn't span a data dimension." raise ValueError(msg.format(coord.name())) - masked_data = ma.masked_array(cube.data) - masked_data[at_iy, at_ix] = ma.masked + masked_data = ma.masked_array(cube.data) + + # Mask all points which would be found discontiguous. + # Note that find_discontiguities finds all instances where a cell is + # discontiguous with a neighbouring cell to its *right* or *above* + # that cell. 
+ masked_data[at_iy, at_ix] = ma.masked + if in_y or not upper: + masked_data[at_iy, at_ix - 1] = ma.masked + if not in_y or not upper: + masked_data[at_iy - 1, at_ix] = ma.masked cube.data = masked_data diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index e32f065625..bf93f01f6b 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -12,9 +12,9 @@ import dask from dask import array as da -import netCDF4 import numpy as np +from iris.fileformats.netcdf import _thread_safe_nc from iris.tests import env_bin_path NCGEN_PATHSTR = str(env_bin_path("ncgen")) @@ -100,7 +100,7 @@ def _add_standard_data(nc_path, unlimited_dim_size=0): """ - ds = netCDF4.Dataset(nc_path, "r+") + ds = _thread_safe_nc.DatasetWrapper(nc_path, "r+") unlimited_dim_names = [ dim for dim in ds.dimensions if ds.dimensions[dim].isunlimited() diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index 90bf0e5d4e..e5614f6b63 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -413,6 +413,30 @@ def test_single(self): aggregateby_cube.data, self.single_rms_expected ) + def test_str_aggregation_single_weights_none(self): + # mean group-by with single coordinate name. + aggregateby_cube = self.cube_single.aggregated_by( + "height", iris.analysis.MEAN, weights=None + ) + self.assertCML( + aggregateby_cube, ("analysis", "aggregated_by", "single.cml") + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, self.single_expected + ) + + def test_coord_aggregation_single_weights_none(self): + # mean group-by with single coordinate. + aggregateby_cube = self.cube_single.aggregated_by( + self.coord_z_single, iris.analysis.MEAN, weights=None + ) + self.assertCML( + aggregateby_cube, ("analysis", "aggregated_by", "single.cml") + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, self.single_expected + ) + def test_weighted_single(self): # weighted mean group-by with single coordinate name. aggregateby_cube = self.cube_single.aggregated_by( @@ -1328,5 +1352,153 @@ def test_weights_fail_with_non_weighted_aggregator(self): ) +# Simply redo the tests of TestAggregateBy with other cubes as weights +# Note: other weights types (e.g., coordinates, cell measures, etc.) are not +# tested this way here since this would require adding dimensional metadata +# objects to the cubes, which would change the CMLs of all resulting cubes of +# TestAggregateBy. 
+ + +class TestAggregateByWeightedByCube(TestAggregateBy): + def setUp(self): + super().setUp() + + self.weights_single = self.cube_single[:, 0, 0].copy( + self.weights_single + ) + self.weights_single.units = "m2" + self.weights_multi = self.cube_multi[:, 0, 0].copy(self.weights_multi) + self.weights_multi.units = "m2" + + def test_str_aggregation_weighted_sum_single(self): + aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.SUM, + weights=self.weights_single, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_coord_aggregation_weighted_sum_single(self): + aggregateby_cube = self.cube_single.aggregated_by( + self.coord_z_single, + iris.analysis.SUM, + weights=self.weights_single, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_str_aggregation_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_str_aggregation_rev_order_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + ["level", "height"], + iris.analysis.SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_coord_aggregation_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z1_multi, self.coord_z2_multi], + iris.analysis.SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_coord_aggregation_rev_order_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z2_multi, self.coord_z1_multi], + iris.analysis.SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + +class TestAggregateByWeightedByObj(tests.IrisTest): + def setUp(self): + self.dim_coord = iris.coords.DimCoord( + [0, 1, 2], standard_name="latitude", units="degrees" + ) + self.aux_coord = iris.coords.AuxCoord( + [0, 1, 1], long_name="auxcoord", units="kg" + ) + self.cell_measure = iris.coords.CellMeasure( + [0, 0, 0], standard_name="cell_area", units="m2" + ) + self.ancillary_variable = iris.coords.AncillaryVariable( + [1, 1, 1], var_name="ancvar", units="kg" + ) + self.cube = iris.cube.Cube( + [1, 2, 3], + standard_name="air_temperature", + units="K", + dim_coords_and_dims=[(self.dim_coord, 0)], + aux_coords_and_dims=[(self.aux_coord, 0)], + cell_measures_and_dims=[(self.cell_measure, 0)], + ancillary_variables_and_dims=[(self.ancillary_variable, 0)], + ) + + def test_weighting_with_str_dim_coord(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights="latitude" + ) + np.testing.assert_array_equal(res_cube.data, [0, 8]) + self.assertEqual(res_cube.units, "K degrees") + + def test_weighting_with_str_aux_coord(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights="auxcoord" + ) + np.testing.assert_array_equal(res_cube.data, [0, 5]) + self.assertEqual(res_cube.units, "K kg") + + def test_weighting_with_str_cell_measure(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights="cell_area" + ) + np.testing.assert_array_equal(res_cube.data, [0, 0]) + self.assertEqual(res_cube.units, "K m2") + + def test_weighting_with_str_ancillary_variable(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights="ancvar" + ) + np.testing.assert_array_equal(res_cube.data, [1, 5]) + 
self.assertEqual(res_cube.units, "K kg") + + def test_weighting_with_dim_coord(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights=self.dim_coord + ) + np.testing.assert_array_equal(res_cube.data, [0, 8]) + self.assertEqual(res_cube.units, "K degrees") + + def test_weighting_with_aux_coord(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights=self.aux_coord + ) + np.testing.assert_array_equal(res_cube.data, [0, 5]) + self.assertEqual(res_cube.units, "K kg") + + def test_weighting_with_cell_measure(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights=self.cell_measure + ) + np.testing.assert_array_equal(res_cube.data, [0, 0]) + self.assertEqual(res_cube.units, "K m2") + + def test_weighting_with_ancillary_variable(self): + res_cube = self.cube.aggregated_by( + "auxcoord", iris.analysis.SUM, weights=self.ancillary_variable + ) + np.testing.assert_array_equal(res_cube.data, [1, 5]) + self.assertEqual(res_cube.units, "K kg") + + if __name__ == "__main__": unittest.main() diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index e0a5d0971e..4b36a915aa 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -12,6 +12,7 @@ import dask.array as da import numpy as np import numpy.ma as ma +import pytest import iris import iris.analysis.cartography @@ -288,7 +289,7 @@ def test_weighted_mean(self): iris.analysis.MEAN, ) - # Test collpasing of non data coord + # Test collapsing of non data coord self.assertRaises( iris.exceptions.CoordinateCollapseError, e.collapsed, @@ -1702,5 +1703,248 @@ def test_weights_in_kwargs(self): self.assertEqual(kwargs, {"test_kwarg": "test", "weights": "ignored"}) +class TestWeights: + @pytest.fixture(autouse=True) + def setup_test_data(self): + self.lat = iris.coords.DimCoord( + [0, 1], standard_name="latitude", units="degrees" + ) + self.lon = iris.coords.DimCoord( + [0, 1, 2], standard_name="longitude", units="degrees" + ) + self.cell_measure = iris.coords.CellMeasure( + np.arange(6).reshape(2, 3), standard_name="cell_area", units="m2" + ) + self.aux_coord = iris.coords.AuxCoord( + [3, 4], long_name="auxcoord", units="s" + ) + self.ancillary_variable = iris.coords.AncillaryVariable( + [5, 6, 7], var_name="ancvar", units="kg" + ) + self.cube = iris.cube.Cube( + np.arange(6).reshape(2, 3), + standard_name="air_temperature", + units="K", + dim_coords_and_dims=[(self.lat, 0), (self.lon, 1)], + aux_coords_and_dims=[(self.aux_coord, 0)], + cell_measures_and_dims=[(self.cell_measure, (0, 1))], + ancillary_variables_and_dims=[(self.ancillary_variable, 1)], + ) + + def test_init_with_weights(self): + weights = iris.analysis._Weights([], self.cube) + new_weights = iris.analysis._Weights(weights, self.cube) + assert isinstance(new_weights, iris.analysis._Weights) + assert new_weights is not weights + np.testing.assert_array_equal(new_weights, []) + assert new_weights.units == "1" + assert weights.units == "1" + + def test_init_with_weights_and_units(self): + weights = iris.analysis._Weights([], self.cube) + new_weights = iris.analysis._Weights(weights, self.cube, units="J") + assert isinstance(new_weights, iris.analysis._Weights) + assert new_weights is not weights + np.testing.assert_array_equal(new_weights, []) + assert new_weights.units == "J" + assert weights.units == "1" + + def test_init_with_cube(self): + weights = iris.analysis._Weights(self.cube, self.cube) + assert isinstance(weights, iris.analysis._Weights) + 
np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + assert weights.units == "K" + + def test_init_with_cube_and_units(self): + weights = iris.analysis._Weights(self.cube, self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + assert weights.units == "J" + + def test_init_with_str_dim_coord(self): + weights = iris.analysis._Weights("latitude", self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) + assert weights.units == "degrees" + + def test_init_with_str_dim_coord_and_units(self): + weights = iris.analysis._Weights("latitude", self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) + assert weights.units == "J" + + def test_init_with_str_aux_coord(self): + weights = iris.analysis._Weights("auxcoord", self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) + assert weights.units == "s" + + def test_init_with_str_aux_coord_and_units(self): + weights = iris.analysis._Weights("auxcoord", self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) + assert weights.units == "J" + + def test_init_with_str_ancillary_variable(self): + weights = iris.analysis._Weights("ancvar", self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) + assert weights.units == "kg" + + def test_init_with_str_ancillary_variable_and_units(self): + weights = iris.analysis._Weights("ancvar", self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) + assert weights.units == "J" + + def test_init_with_str_cell_measure(self): + weights = iris.analysis._Weights("cell_area", self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + assert weights.units == "m2" + + def test_init_with_str_cell_measure_and_units(self): + weights = iris.analysis._Weights("cell_area", self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + assert weights.units == "J" + + def test_init_with_dim_coord(self): + weights = iris.analysis._Weights(self.lat, self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) + assert weights.units == "degrees" + + def test_init_with_dim_coord_and_units(self): + weights = iris.analysis._Weights(self.lat, self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) + assert weights.units == "J" + + def test_init_with_aux_coord(self): + weights = iris.analysis._Weights(self.aux_coord, self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) + assert weights.units == "s" + + def test_init_with_aux_coord_and_units(self): + weights = iris.analysis._Weights(self.aux_coord, self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) + assert weights.units == "J" + + def 
test_init_with_ancillary_variable(self): + weights = iris.analysis._Weights(self.ancillary_variable, self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) + assert weights.units == "kg" + + def test_init_with_ancillary_variable_and_units(self): + weights = iris.analysis._Weights( + self.ancillary_variable, self.cube, units="J" + ) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) + assert weights.units == "J" + + def test_init_with_cell_measure(self): + weights = iris.analysis._Weights(self.cell_measure, self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + assert weights.units == "m2" + + def test_init_with_cell_measure_and_units(self): + weights = iris.analysis._Weights( + self.cell_measure, self.cube, units="J" + ) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + assert weights.units == "J" + + def test_init_with_list(self): + weights = iris.analysis._Weights([1, 2, 3], self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [1, 2, 3]) + assert weights.units == "1" + + def test_init_with_list_and_units(self): + weights = iris.analysis._Weights([1, 2, 3], self.cube, units="J") + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, [1, 2, 3]) + assert weights.units == "J" + + def test_init_with_ndarray(self): + weights = iris.analysis._Weights(np.zeros((5, 5)), self.cube) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.zeros((5, 5))) + assert weights.units == "1" + + def test_init_with_ndarray_and_units(self): + weights = iris.analysis._Weights( + np.zeros((5, 5)), self.cube, units="J" + ) + assert isinstance(weights, iris.analysis._Weights) + np.testing.assert_array_equal(weights, np.zeros((5, 5))) + assert weights.units == "J" + + def test_init_with_invalid_obj(self): + with pytest.raises(KeyError): + iris.analysis._Weights("invalid_obj", self.cube) + + def test_init_with_invalid_obj_and_units(self): + with pytest.raises(KeyError): + iris.analysis._Weights("invalid_obj", self.cube, units="J") + + def test_update_kwargs_no_weights(self): + kwargs = {"test": [1, 2, 3]} + iris.analysis._Weights.update_kwargs(kwargs, self.cube) + assert kwargs == {"test": [1, 2, 3]} + + def test_update_kwargs_weights_none(self): + kwargs = {"test": [1, 2, 3], "weights": None} + iris.analysis._Weights.update_kwargs(kwargs, self.cube) + assert kwargs == {"test": [1, 2, 3], "weights": None} + + def test_update_kwargs_weights(self): + kwargs = {"test": [1, 2, 3], "weights": [1, 2]} + iris.analysis._Weights.update_kwargs(kwargs, self.cube) + assert len(kwargs) == 2 + assert kwargs["test"] == [1, 2, 3] + assert isinstance(kwargs["weights"], iris.analysis._Weights) + np.testing.assert_array_equal(kwargs["weights"], [1, 2]) + assert kwargs["weights"].units == "1" + + +def test__Groupby_repr(): + groupby_coord = iris.coords.AuxCoord([2000, 2000], var_name="year") + shared_coord = iris.coords.DimCoord( + [0, 1], + var_name="time", + units=cf_units.Unit("days since 2000-01-01"), + ) + grouper = iris.analysis._Groupby([groupby_coord], [(shared_coord, 0)]) + assert repr(grouper) == "_Groupby(['year'], shared_coords=['time'])" + + +CUBE = iris.cube.Cube(0) + + +@pytest.mark.parametrize( + 
"kwargs,expected", + [ + ({}, "s"), + ({"test": "m"}, "s"), + ({"weights": None}, "s"), + ({"weights": [1, 2, 3]}, "s"), + ({"weights": iris.analysis._Weights([1], CUBE)}, "s"), + ({"weights": iris.analysis._Weights([1], CUBE, units="kg")}, "s kg"), + ], +) +def test_sum_units_func(kwargs, expected): + units = cf_units.Unit("s") + result = iris.analysis._sum_units_func(units, **kwargs) + assert result == expected + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index 6c08dc1f9e..4d92b9a92c 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -589,9 +589,9 @@ def test_apply_ufunc(self): in_place=False, ) - ans = a.data**2 + answer = a.data**2 - self.assertArrayEqual(b.data, ans) + self.assertArrayEqual(b.data, answer) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) @@ -603,10 +603,10 @@ def vec_mag(u, v): vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) b = iris.analysis.maths.apply_ufunc(vec_mag_ufunc, a, c) - ans = a.data**2 + c.data**2 + answer = a.data**2 + c.data**2 b2 = b**2 - self.assertArrayAlmostEqual(b2.data, ans) + self.assertArrayAlmostEqual(b2.data, answer) class TestIFunc(tests.IrisTest): @@ -620,9 +620,9 @@ def test_ifunc(self): my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units**2) b = my_ifunc(a, new_name="more_thingness", in_place=False) - ans = a.data**2 + answer = a.data**2 - self.assertArrayEqual(b.data, ans) + self.assertArrayEqual(b.data, answer) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) @@ -637,9 +637,9 @@ def vec_mag(u, v): ) b = my_ifunc(a, c) - ans = (a.data**2 + c.data**2) ** 0.5 + answer = (a.data**2 + c.data**2) ** 0.5 - self.assertArrayAlmostEqual(b.data, ans) + self.assertArrayAlmostEqual(b.data, answer) def vec_mag_data_func(u_data, v_data): return np.sqrt(u_data**2 + v_data**2) @@ -654,10 +654,10 @@ def vec_mag_data_func(u_data, v_data): cs_ifunc = iris.analysis.maths.IFunc(np.cumsum, lambda a: a.units) b = cs_ifunc(a, axis=1) - ans = a.data.copy() - ans = np.cumsum(ans, axis=1) + answer = a.data.copy() + answer = np.cumsum(answer, axis=1) - self.assertArrayAlmostEqual(b.data, ans) + self.assertArrayAlmostEqual(b.data, answer) @tests.skip_data diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index 8f2a9b474d..0abb35c566 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -486,7 +486,7 @@ def test_axis(self): coords = cube.coords(axis="x") self.assertEqual([coord.name() for coord in coords], ["longitude"]) - # Renaming shoudn't be enough + # Renaming shouldn't be enough cube.coord("an_other").rename("time") coords = cube.coords(axis="t") self.assertEqual([coord.name() for coord in coords], []) @@ -718,7 +718,7 @@ def test_cube_slice_zero_len_slice(self): def test_cube_slice_with_non_existant_coords(self): with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - self.t.slices(["dim2", "dim1", "doesnt exist"]) + self.t.slices(["dim2", "dim1", "doesn't exist"]) def test_cube_extract_coord_with_non_describing_coordinates(self): with self.assertRaises(ValueError): @@ -1478,7 +1478,7 @@ def test_coord_conversion(self): # Invalid coords invalid_choices = [ - iris.analysis.MEAN, # Caused by mixing up argument order in call to cube.collasped for example + iris.analysis.MEAN, # Caused by mixing up argument order in call to cube.collapsed for example None, ["grid_latitude", None], [lat, None], 
diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index bf3cddb8b7..3abd6b981b 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -15,6 +15,8 @@ import io from unittest import mock +import pytest + import iris import iris.fileformats.cf as cf @@ -52,11 +54,14 @@ def test_cached(self): @tests.skip_data class TestCFReader(tests.IrisTest): - def setUp(self): + @pytest.fixture(autouse=True) + def set_up(self): filename = tests.get_data_path( ("NetCDF", "rotated", "xyt", "small_rotPole_precipitation.nc") ) self.cfr = cf.CFReader(filename) + with self.cfr: + yield def test_ancillary_variables_pass_0(self): self.assertEqual(self.cfr.cf_group.ancillary_variables, {}) @@ -348,7 +353,8 @@ def test_cell_methods(self): @tests.skip_data class TestClimatology(tests.IrisTest): - def setUp(self): + @pytest.fixture(autouse=True) + def set_up(self): filename = tests.get_data_path( ( "NetCDF", @@ -357,6 +363,8 @@ def setUp(self): ) ) self.cfr = cf.CFReader(filename) + with self.cfr: + yield def test_bounds(self): time = self.cfr.cf_group["temp_dmax_tmean_abs"].cf_group.coordinates[ @@ -373,7 +381,8 @@ def test_bounds(self): @tests.skip_data class TestLabels(tests.IrisTest): - def setUp(self): + @pytest.fixture(autouse=True) + def set_up(self): filename = tests.get_data_path( ( "NetCDF", @@ -388,6 +397,10 @@ def setUp(self): ) self.cfr_end = cf.CFReader(filename) + with self.cfr_start: + with self.cfr_end: + yield + def test_label_dim_start(self): cf_data_var = self.cfr_start.cf_group["temp_dmax_tmean_abs"] diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 01f6f777fa..6cea9dc001 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -12,9 +12,13 @@ from fnmatch import fnmatch from glob import glob import os +from pathlib import Path import subprocess +from typing import List, Tuple import iris +from iris.fileformats.netcdf import _thread_safe_nc +from iris.tests import system_test LICENSE_TEMPLATE = """# Copyright Iris contributors # @@ -40,6 +44,95 @@ IRIS_REPO_DIRPATH = os.environ.get("IRIS_REPO_DIR", IRIS_INSTALL_DIR) +def test_netcdf4_import(): + """Use of netCDF4 must be via iris.fileformats.netcdf._thread_safe_nc .""" + # Please avoid including these phrases in any comments/strings throughout + # Iris (e.g. use "from the netCDF4 library" instead) - this allows the + # below search to remain quick and simple. + import_strings = ("import netCDF4", "from netCDF4") + + files_including_import = [] + for file_path in Path(IRIS_DIR).rglob("*.py"): + file_text = file_path.read_text() + + if any([i in file_text for i in import_strings]): + files_including_import.append(file_path) + + expected = [ + Path(_thread_safe_nc.__file__), + Path(system_test.__file__), + Path(__file__), + ] + assert set(files_including_import) == set(expected) + + +def test_python_versions(): + """ + This test is designed to fail whenever Iris' supported Python versions are + updated, insisting that versions are updated EVERYWHERE in-sync. 
+ """ + latest_supported = "3.11" + all_supported = ["3.9", "3.10", latest_supported] + + root_dir = Path(__file__).parents[3] + workflows_dir = root_dir / ".github" / "workflows" + benchmarks_dir = root_dir / "benchmarks" + + # Places that are checked: + pyproject_toml_file = root_dir / "pyproject.toml" + requirements_dir = root_dir / "requirements" + nox_file = root_dir / "noxfile.py" + ci_wheels_file = workflows_dir / "ci-wheels.yml" + ci_tests_file = workflows_dir / "ci-tests.yml" + asv_config_file = benchmarks_dir / "asv.conf.json" + benchmark_runner_file = benchmarks_dir / "bm_runner.py" + + text_searches: List[Tuple[Path, str]] = [ + ( + pyproject_toml_file, + "\n ".join( + [ + f'"Programming Language :: Python :: {ver}",' + for ver in all_supported + ] + ), + ), + ( + nox_file, + "_PY_VERSIONS_ALL = [" + + ", ".join([f'"{ver}"' for ver in all_supported]), + ), + ( + ci_wheels_file, + "python-version: [" + + ", ".join([f'"{ver}"' for ver in all_supported]), + ), + ( + ci_tests_file, + ( + f'python-version: ["{latest_supported}"]\n' + f'{" " * 8}session: ["doctest", "gallery", "linkcheck"]' + ), + ), + (asv_config_file, f"PY_VER={latest_supported}"), + (benchmark_runner_file, f'python_version = "{latest_supported}"'), + ] + + for ver in all_supported: + req_yaml = requirements_dir / f"py{ver.replace('.', '')}.yml" + text_searches.append((req_yaml, f"- python ={ver}")) + + text_searches.append( + ( + ci_tests_file, + f'python-version: "{ver}"\n{" " * 12}session: "tests"', + ) + ) + + for path, search in text_searches: + assert search in path.read_text() + + class TestLicenseHeaders(tests.IrisTest): @staticmethod def whatchanged_parse(whatchanged_output): @@ -132,6 +225,11 @@ def test_license_headers(self): ): with open(full_fname) as fh: content = fh.read() + if content.startswith("#!"): + # account for files with leading shebang directives + # i.e., first strip out the shebang line before + # then performing license header compliance checking + content = "\n".join(content.split("\n")[1:]) if not content.startswith(LICENSE_TEMPLATE): print( "The file {} does not start with the required " diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index 968b71d292..e4c22f49b0 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -15,13 +15,22 @@ import numpy as np import numpy.ma as ma +from iris.aux_factory import HybridHeightFactory from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.cube import iris.tests.stock as stock def _make_cube( - x, y, data, aux=None, cell_measure=None, ancil=None, offset=0, scalar=None + x, + y, + data, + aux=None, + cell_measure=None, + ancil=None, + derived=None, + offset=0, + scalar=None, ): """ A convenience test function that creates a custom 2D cube. @@ -47,6 +56,18 @@ def _make_cube( A CSV string specifying which points only auxiliary coordinates to create. Accepts either of 'x', 'y', 'xy'. + * cell_measure: + A CSV string specifying which points only cell measures + coordinates to create. Accepts either of 'x', 'y', 'xy'. + + * ancil: + A CSV string specifying which points only ancillary variables + coordinates to create. Accepts either of 'x', 'y', 'xy'. + + * derived: + A CSV string specifying which points only derived coordinates + coordinates to create. Accepts either of 'x', 'y', 'xy'. + * offset: Offset value to be added to the 'xy' auxiliary coordinate points. 
@@ -120,6 +141,30 @@ def _make_cube( ) cube.add_ancillary_variable(av, (0, 1)) + if derived is not None: + derived = derived.split(",") + delta = AuxCoord(0.0, var_name="delta", units="m") + sigma = AuxCoord(1.0, var_name="sigma", units="1") + cube.add_aux_coord(delta, ()) + cube.add_aux_coord(sigma, ()) + if "y" in derived: + orog = AuxCoord(y_range * 10, long_name="orog", units="m") + cube.add_aux_coord(orog, 0) + elif "x" in derived: + orog = AuxCoord(x_range * 10, long_name="orog", units="m") + cube.add_aux_coord(orog, 1) + elif "xy" in derived: + payload = np.arange(y_size * x_size, dtype=np.float32).reshape( + y_size, x_size + ) + orog = AuxCoord( + payload * 100 + offset, long_name="orog", units="m" + ) + cube.add_aux_coord(orog, (0, 1)) + else: + raise NotImplementedError() + cube.add_aux_factory(HybridHeightFactory(delta, sigma, orog)) + if scalar is not None: data = np.array([scalar], dtype=np.float32) coord = AuxCoord(data, long_name="height", units="m") @@ -362,6 +407,14 @@ def test_ancil_missing(self): result = concatenate(cubes) self.assertEqual(len(result), 2) + def test_derived_coord_missing(self): + cubes = [] + y = (0, 2) + cubes.append(_make_cube((0, 2), y, 1, derived="x")) + cubes.append(_make_cube((2, 4), y, 2)) + result = concatenate(cubes) + self.assertEqual(len(result), 2) + class Test2D(tests.IrisTest): def test_masked_and_unmasked(self): @@ -736,6 +789,17 @@ def test_concat_2y2d_ancil_x_y_xy(self): self.assertEqual(result[0].shape, (6, 2)) self.assertEqual(result[0], com) + def test_concat_2y2d_derived_x_y_xy(self): + cubes = [] + x = (0, 2) + cubes.append(_make_cube(x, (0, 4), 1, derived="x,y,xy")) + cubes.append(_make_cube(x, (4, 6), 1, derived="x,y,xy")) + result = concatenate(cubes) + com = _make_cube(x, (0, 6), 1, derived="x,y,xy") + self.assertEqual(len(result), 1) + self.assertEqual(result[0].shape, (6, 2)) + self.assertEqual(result[0], com) + class TestMulti2D(tests.IrisTest): def test_concat_4x2d_aux_xy(self): diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py index f2c0a9ae30..a6fc5e3149 100644 --- a/lib/iris/tests/test_cube_to_pp.py +++ b/lib/iris/tests/test_cube_to_pp.py @@ -348,7 +348,7 @@ def test_pp_save_rules(self): os.remove(temp_filename) - # Test mutiple process flags + # Test multiple process flags multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024)) # Maps lbproc value to the process flags that should be created diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py index d1ebc9a36a..57b748e52f 100644 --- a/lib/iris/tests/test_lazy_aggregate_by.py +++ b/lib/iris/tests/test_lazy_aggregate_by.py @@ -6,6 +6,7 @@ import unittest from iris._lazy_data import as_lazy_data +from iris.analysis import SUM from iris.tests import test_aggregate_by @@ -44,5 +45,65 @@ def tearDown(self): assert self.cube_easy.has_lazy_data() +class TestLazyAggregateByWeightedByCube(TestLazyAggregateBy): + def setUp(self): + super().setUp() + + self.weights_single = self.cube_single[:, 0, 0].copy( + self.weights_single + ) + self.weights_single.units = "m2" + self.weights_multi = self.cube_multi[:, 0, 0].copy(self.weights_multi) + self.weights_multi.units = "m2" + + def test_str_aggregation_weighted_sum_single(self): + aggregateby_cube = self.cube_single.aggregated_by( + "height", + SUM, + weights=self.weights_single, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_coord_aggregation_weighted_sum_single(self): + aggregateby_cube = self.cube_single.aggregated_by( + 
self.coord_z_single, + SUM, + weights=self.weights_single, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_str_aggregation_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + ["height", "level"], + SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_str_aggregation_rev_order_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + ["level", "height"], + SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_coord_aggregation_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z1_multi, self.coord_z2_multi], + SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + def test_coord_aggregation_rev_order_weighted_sum_multi(self): + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z2_multi, self.coord_z1_multi], + SUM, + weights=self.weights_multi, + ) + self.assertEqual(aggregateby_cube.units, "kelvin m2") + + if __name__ == "__main__": unittest.main() diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 4749236abc..adb33924e5 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -14,9 +14,8 @@ import pathlib from unittest import mock -import netCDF4 - import iris +from iris.fileformats.netcdf import _thread_safe_nc import iris.io @@ -193,10 +192,11 @@ def test_netCDF_Dataset_call(self): filename = tests.get_data_path( ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") ) - fake_dataset = netCDF4.Dataset(filename) + fake_dataset = _thread_safe_nc.DatasetWrapper(filename) with mock.patch( - "netCDF4.Dataset", return_value=fake_dataset + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=fake_dataset, ) as dataset_loader: next(iris.io.load_http([self.url], callback=None)) dataset_loader.assert_called_with(self.url, mode="r") diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 92e15a414a..6438140ed9 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -19,7 +19,6 @@ import tempfile from unittest import mock -import netCDF4 as nc import numpy as np import numpy.ma as ma @@ -29,6 +28,7 @@ import iris.coord_systems as icoord_systems from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf +from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names import iris.tests.stock as stock @@ -81,7 +81,7 @@ def test_missing_time_bounds(self): ("NetCDF", "global", "xyt", "SMALL_hires_wind_u_for_ipcc4.nc") ) shutil.copyfile(src, filename) - dataset = nc.Dataset(filename, mode="a") + dataset = _thread_safe_nc.DatasetWrapper(filename, mode="a") dataset.renameVariable("time_bnds", "foo") dataset.close() _ = iris.load_cube(filename, "eastward_wind") @@ -96,7 +96,7 @@ def test_load_global_xyzt_gems(self): cubes = sorted(cubes, key=lambda cube: cube.name()) self.assertCML(cubes, ("netcdf", "netcdf_global_xyzt_gems.cml")) - # Check the masked array fill value is propogated through the data + # Check the masked array fill value is propagated through the data # manager loading. 
lnsp = cubes[1] self.assertTrue(ma.isMaskedArray(lnsp.data)) @@ -204,7 +204,7 @@ def test_missing_climatology(self): ("NetCDF", "transverse_mercator", "tmean_1910_1910.nc") ) shutil.copyfile(src, filename) - dataset = nc.Dataset(filename, mode="a") + dataset = _thread_safe_nc.DatasetWrapper(filename, mode="a") dataset.renameVariable("climatology_bounds", "foo") dataset.close() _ = iris.load_cube(filename, "Mean temperature") @@ -632,7 +632,7 @@ def test_netcdf_save_format(self): with self.temp_filename(suffix=".nc") as file_out: # Test default NETCDF4 file format saving. iris.save(cube, file_out) - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertEqual( ds.file_format, "NETCDF4", "Failed to save as NETCDF4 format" ) @@ -640,7 +640,7 @@ def test_netcdf_save_format(self): # Test NETCDF4_CLASSIC file format saving. iris.save(cube, file_out, netcdf_format="NETCDF4_CLASSIC") - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertEqual( ds.file_format, "NETCDF4_CLASSIC", @@ -650,7 +650,7 @@ def test_netcdf_save_format(self): # Test NETCDF3_CLASSIC file format saving. iris.save(cube, file_out, netcdf_format="NETCDF3_CLASSIC") - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertEqual( ds.file_format, "NETCDF3_CLASSIC", @@ -660,7 +660,7 @@ def test_netcdf_save_format(self): # Test NETCDF4_64BIT file format saving. iris.save(cube, file_out, netcdf_format="NETCDF3_64BIT") - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertTrue( ds.file_format in ["NETCDF3_64BIT", "NETCDF3_64BIT_OFFSET"], "Failed to save as NETCDF3_64BIT format", @@ -1029,11 +1029,11 @@ def test_attributes(self): } for k, v in aglobals.items(): self.cube.attributes[k] = v - # Should be overriden. + # Should be overridden. aover = {"Conventions": "TEST"} for k, v in aover.items(): self.cube.attributes[k] = v - # Should be data varible attributes. + # Should be data variable attributes. avars = { "standard_error_multiplier": 23, "flag_masks": "a", @@ -1047,7 +1047,7 @@ def test_attributes(self): with self.temp_filename(suffix=".nc") as filename: iris.save(self.cube, filename) # Load the dataset. - ds = nc.Dataset(filename, "r") + ds = _thread_safe_nc.DatasetWrapper(filename, "r") exceptions = [] # Should be global attributes. for gkey in aglobals: @@ -1057,14 +1057,14 @@ def test_attributes(self): getattr(ds, gkey), aglobals.get(gkey) ) ) - # Should be overriden. + # Should be overridden. for okey in aover: if getattr(ds, okey) == aover.get(okey): exceptions.append( "{} != {}".format(getattr(ds, okey), avars.get(okey)) ) dv = ds["temp"] - # Should be data varible attributes; + # Should be data variable attributes; # except STASH -> um_stash_source. for vkey in avars: if vkey != "STASH" and (getattr(dv, vkey) != avars.get(vkey)): @@ -1211,7 +1211,7 @@ def test_shared(self): self.assertCDL(filename) # Also check that only one, shared ancillary variable was written. 
- ds = nc.Dataset(filename) + ds = _thread_safe_nc.DatasetWrapper(filename) self.assertIn("air_potential_temperature", ds.variables) self.assertIn("alternate_data", ds.variables) self.assertEqual( @@ -1405,7 +1405,7 @@ def test_process_flags(self): process_flag = cube.attributes["ukmo__process_flags"][0] self.assertEqual(process_flag, process_desc) - # Test mutiple process flags + # Test multiple process flags multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024)) # Maps lbproc value to the process flags that should be created diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index c9eba31e58..55c912f423 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -193,7 +193,7 @@ def test_coord_coord_map(self): self.check_graphic() def test_cube_cube(self): - # plot two phenomena against eachother, in this case just dummy data + # plot two phenomena against each other, in this case just dummy data cube1 = self.cube1d.copy() cube2 = self.cube1d.copy() cube1.rename("some phenomenon") @@ -668,7 +668,7 @@ class CheckForWarningsMetaclass(type): """ Metaclass that adds a further test for each base class test that checks that each test raises a UserWarning. Each base - class test is then overriden to ignore warnings in order to + class test is then overridden to ignore warnings in order to check the underlying functionality. """ @@ -1001,6 +1001,15 @@ def test_non_cube_coordinate(self): self.draw("contourf", cube, coords=["grid_latitude", x]) +@tests.skip_data +@tests.skip_plot +class TestPlotHist(tests.GraphicsTest): + def test_cube(self): + cube = simple_cube()[0] + iplt.hist(cube, bins=np.linspace(287.7, 288.2, 11)) + self.check_graphic() + + @tests.skip_data @tests.skip_plot class TestPlotDimAndAuxCoordsKwarg(tests.GraphicsTest): diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py index 2b497cb53b..49bedaf1e2 100644 --- a/lib/iris/tests/test_pp_cf.py +++ b/lib/iris/tests/test_pp_cf.py @@ -10,10 +10,9 @@ import os import tempfile -import netCDF4 - import iris import iris.coords +from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.pp import STASH import iris.tests.pp as pp import iris.util @@ -95,7 +94,7 @@ def _test_file(self, name): for index, cube in enumerate(cubes): # Explicitly set a fill-value as a workaround for # https://github.com/Unidata/netcdf4-python/issues/725 - fill_value = netCDF4.default_fillvals[cube.dtype.str[1:]] + fill_value = _thread_safe_nc.default_fillvals[cube.dtype.str[1:]] file_nc = tempfile.NamedTemporaryFile( suffix=".nc", delete=False diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index 7eebfb07b3..ca7f1c50eb 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -155,10 +155,10 @@ def test_lbtim_access(self): def test_t1_t2_access(self): field = self.r[0] - calander = "360_day" + calendar = "360_day" self.assertEqual( field.t1.timetuple(), - cftime.datetime(1994, 12, 1, 0, 0, calendar=calander).timetuple(), + cftime.datetime(1994, 12, 1, 0, 0, calendar=calendar).timetuple(), ) def test_save_single(self): diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index 06f170c666..df2db12de6 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -10,6 +10,9 @@ # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip + +import numpy as np + import iris import iris.tests.test_plot 
as test_plot @@ -281,5 +284,21 @@ def test_without_axes__default(self): self._check(mappable, self.figure2, self.axes2) +@tests.skip_data +@tests.skip_plot +class TestPlotHist(tests.GraphicsTest): + def test_horizontal(self): + cube = test_plot.simple_cube()[0] + qplt.hist(cube, bins=np.linspace(287.7, 288.2, 11)) + self.check_graphic() + + def test_vertical(self): + cube = test_plot.simple_cube()[0] + qplt.hist( + cube, bins=np.linspace(287.7, 288.2, 11), orientation="horizontal" + ) + self.check_graphic() + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index 7952b3bb46..212a39bf2d 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -55,7 +55,7 @@ def uv_cubes_3d(ref_cube, n_realization=3): """ Return 3d u, v cubes with a grid in a rotated pole CRS taken from the provided 2d cube, by adding a realization dimension - coordinate bound to teh zeroth dimension. + coordinate bound to the zeroth dimension. """ lat = ref_cube.coord("grid_latitude") @@ -451,7 +451,7 @@ def test_rotated_to_osgb(self): assert anom[~ut.data.mask].max() == pytest.approx(0.3227935) def test_rotated_to_unrotated(self): - # Suffiently accurate so that no mask is introduced. + # Sufficiently accurate so that no mask is introduced. u, v = uv_cubes() ut, vt = rotate_winds(u, v, iris.coord_systems.GeogCS(6371229)) self.assertFalse(ma.isMaskedArray(ut.data)) @@ -511,5 +511,62 @@ def test_non_earth_semimajor_axis(self): rotate_winds(u, v, other_cs) +class TestLazyRotateWinds(tests.IrisTest): + def _compare_lazy_rotate_winds(self, masked): + # Compute wind rotation with lazy data and compare results + + # Choose target coord system that will (not) lead to masked results + if masked: + coord_sys = iris.coord_systems.OSGB() + else: + coord_sys = iris.coord_systems.GeogCS(6371229) + + u, v = uv_cubes() + + # Create deep copy of the cubes with rechunked lazy data to check if + # input data is modified, and if Dask metadata is preserved + u_lazy = u.copy(data=u.copy().lazy_data().rechunk([2, 1])) + v_lazy = v.copy(data=v.copy().lazy_data().rechunk([1, 2])) + + ut_ref, vt_ref = rotate_winds(u, v, coord_sys) + self.assertFalse(ut_ref.has_lazy_data()) + self.assertFalse(vt_ref.has_lazy_data()) + # Ensure that choice of target coordinates leads to (no) masking + self.assertTrue(ma.isMaskedArray(ut_ref.data) == masked) + + # Results are lazy if at least one component is lazy + ut, vt = rotate_winds(u_lazy, v, coord_sys) + self.assertTrue(ut.has_lazy_data()) + self.assertTrue(vt.has_lazy_data()) + self.assertTrue(ut.core_data().chunksize == (2, 1)) + self.assertArrayAllClose(ut.data, ut_ref.data, rtol=1e-5) + self.assertArrayAllClose(vt.data, vt_ref.data, rtol=1e-5) + + ut, vt = rotate_winds(u, v_lazy, coord_sys) + self.assertTrue(ut.has_lazy_data()) + self.assertTrue(vt.has_lazy_data()) + self.assertTrue(vt.core_data().chunksize == (1, 2)) + self.assertArrayAllClose(ut.data, ut_ref.data, rtol=1e-5) + self.assertArrayAllClose(vt.data, vt_ref.data, rtol=1e-5) + + ut, vt = rotate_winds(u_lazy, v_lazy, coord_sys) + self.assertTrue(ut.has_lazy_data()) + self.assertTrue(vt.has_lazy_data()) + self.assertTrue(ut.core_data().chunksize == (2, 1)) + self.assertTrue(vt.core_data().chunksize == (1, 2)) + self.assertArrayAllClose(ut.data, ut_ref.data, rtol=1e-5) + self.assertArrayAllClose(vt.data, vt_ref.data, rtol=1e-5) + + 
# Ensure that input data has not been modified + self.assertArrayAllClose(u.data, u_lazy.data, rtol=1e-5) + self.assertArrayAllClose(v.data, v_lazy.data, rtol=1e-5) + + def test_lazy_rotate_winds_masked(self): + self._compare_lazy_rotate_winds(True) + + def test_lazy_rotate_winds_notmasked(self): + self._compare_lazy_rotate_winds(False) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index 6c3999a6f4..a91a08dcb8 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -132,7 +132,7 @@ def setUp(self): def test_interpolate_bad_coord_name(self): with self.assertRaises(iris.exceptions.CoordinateNotFoundError): RectilinearInterpolator( - self.cube, ["doesnt exist"], LINEAR, EXTRAPOLATE + self.cube, ["doesn't exist"], LINEAR, EXTRAPOLATE ) def test_interpolate_data_single(self): diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py index 558a6fccfe..c259bdeff6 100644 --- a/lib/iris/tests/unit/analysis/maths/__init__.py +++ b/lib/iris/tests/unit/analysis/maths/__init__.py @@ -155,7 +155,7 @@ def test_slice(self): # of the mesh-coords, which don't match to the originals. # FOR NOW: remove those, for a result matching the other ones. # TODO: coord equivalence may need reviewing, either for cube - # maths or for coord equivalance generally. + # maths or for coord equivalence generally. # cf. https://github.com/SciTools/iris/issues/4671 if cube.mesh and dim == cube.mesh_dim(): for co in cube.coords(mesh_coords=True): diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py b/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py new file mode 100644 index 0000000000..11664af115 --- /dev/null +++ b/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py @@ -0,0 +1,29 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for cube arithmetic with dask arrays.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +from unittest import mock + +import dask +import dask.array as da + +import iris.cube +from iris.tests.unit.analysis.maths import MathsAddOperationMixin + + +class TestArithDask(tests.IrisTest, MathsAddOperationMixin): + @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) + def test_compute_not_called(self, mocked_compute): + # No data should be realised when adding a cube and a dask array. 
+ cube = iris.cube.Cube(da.arange(4)) + array = da.ones(4) + + self.data_op(cube, array) + mocked_compute.assert_not_called() diff --git a/lib/iris/tests/unit/analysis/test_Aggregator.py b/lib/iris/tests/unit/analysis/test_Aggregator.py index ec837ea49a..45081ad07f 100644 --- a/lib/iris/tests/unit/analysis/test_Aggregator.py +++ b/lib/iris/tests/unit/analysis/test_Aggregator.py @@ -15,6 +15,7 @@ import numpy.ma as ma from iris.analysis import Aggregator +from iris.cube import Cube from iris.exceptions import LazyAggregatorError @@ -286,10 +287,30 @@ def test_units_change(self): units_func = mock.Mock(return_value=mock.sentinel.new_units) aggregator = Aggregator("", None, units_func) cube = mock.Mock(units=mock.sentinel.units) - aggregator.update_metadata(cube, []) - units_func.assert_called_once_with(mock.sentinel.units) + aggregator.update_metadata(cube, [], kw1=1, kw2=2) + units_func.assert_called_once_with(mock.sentinel.units, kw1=1, kw2=2) self.assertEqual(cube.units, mock.sentinel.new_units) + def test_units_func_no_kwargs(self): + # To ensure backwards-compatibility, Aggregator also supports + # units_func that accept the single argument `units` + def units_func(units): + return units**2 + + aggregator = Aggregator("", None, units_func) + cube = Cube(0, units="s") + aggregator.update_metadata(cube, [], kw1=1, kw2=2) + self.assertEqual(cube.units, "s2") + + def test_units_func_kwargs(self): + def units_func(units, **kwargs): + return units**2 + + aggregator = Aggregator("", None, units_func) + cube = Cube(0, units="s") + aggregator.update_metadata(cube, [], kw1=1, kw2=2) + self.assertEqual(cube.units, "s2") + class Test_lazy_aggregate(tests.IrisTest): def test_kwarg_pass_through_no_kwargs(self): diff --git a/lib/iris/tests/unit/analysis/test_RMS.py b/lib/iris/tests/unit/analysis/test_RMS.py index 141b3e262b..74f309ce00 100644 --- a/lib/iris/tests/unit/analysis/test_RMS.py +++ b/lib/iris/tests/unit/analysis/test_RMS.py @@ -101,20 +101,16 @@ def test_1d_weighted(self): data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64)) weights = np.array([1, 4, 3, 2], dtype=np.float64) expected_rms = 8.0 - # https://github.com/dask/dask/issues/3846. - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) + rms = RMS.lazy_aggregate(data, 0, weights=weights) + self.assertAlmostEqual(rms, expected_rms) def test_1d_lazy_weighted(self): # 1-dimensional input with lazy weights. data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64)) weights = as_lazy_data(np.array([1, 4, 3, 2], dtype=np.float64)) expected_rms = 8.0 - # https://github.com/dask/dask/issues/3846. - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) + rms = RMS.lazy_aggregate(data, 0, weights=weights) + self.assertAlmostEqual(rms, expected_rms) def test_2d_weighted(self): # 2-dimensional input with weights. @@ -123,20 +119,16 @@ def test_2d_weighted(self): ) weights = np.array([[1, 4, 3, 2], [2, 1, 1.5, 0.5]], dtype=np.float64) expected_rms = np.array([8.0, 16.0], dtype=np.float64) - # https://github.com/dask/dask/issues/3846. 
- with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 1, weights=weights) - self.assertArrayAlmostEqual(rms, expected_rms) + rms = RMS.lazy_aggregate(data, 1, weights=weights) + self.assertArrayAlmostEqual(rms, expected_rms) def test_unit_weighted(self): # Unit weights should be the same as no weights. data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64)) weights = np.ones_like(data) expected_rms = 4.5 - # https://github.com/dask/dask/issues/3846. - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) + rms = RMS.lazy_aggregate(data, 0, weights=weights) + self.assertAlmostEqual(rms, expected_rms) def test_masked(self): # Masked entries should be completely ignored. @@ -152,9 +144,6 @@ def test_masked(self): self.assertAlmostEqual(rms, expected_rms) def test_masked_weighted(self): - # Weights should work properly with masked arrays, but currently don't - # (see https://github.com/dask/dask/issues/3846). - # For now, masked weights are simply not supported. data = as_lazy_data( ma.array( [4, 7, 18, 10, 11, 8], @@ -164,9 +153,8 @@ def test_masked_weighted(self): ) weights = np.array([1, 4, 5, 3, 8, 2]) expected_rms = 8.0 - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) + rms = RMS.lazy_aggregate(data, 0, weights=weights) + self.assertAlmostEqual(rms, expected_rms) class Test_name(tests.IrisTest): diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index 3375f63bf2..f8bd54093f 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -12,6 +12,8 @@ # importing anything else. import iris.tests as tests # isort:skip +from unittest import mock + import numpy as np import iris @@ -143,7 +145,11 @@ def setUp(self): path = tests.get_data_path( ["NetCDF", "testing", "small_theta_colpex.nc"] ) - self.cube = iris.load_cube(path, "air_potential_temperature") + # While loading, "turn off" loading small variables as real data. + with mock.patch( + "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 + ): + self.cube = iris.load_cube(path, "air_potential_temperature") def _check_lazy(self): coords = self.cube.aux_coords + self.cube.derived_coords diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index 4ffeb7a67a..155c4f99b8 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -121,7 +121,7 @@ class MetadataChild(MetadataParent): expected = dict(zip(members_parent, values_parent)) self.assertEqual(metadata_parent._asdict(), expected) - # Check the dependant child class... + # Check the dependent child class... 
expected = ["MetadataChildNamedtuple", "MetadataParent"] self.assertEqual(self.names(MetadataChild.__bases__), expected) expected = [ diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index 5ecf0b90d5..cbb29b7161 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -167,10 +167,10 @@ def setUp(self): def test_pickle(self): for protocol in range(pickle.HIGHEST_PROTOCOL + 1): with self.temp_filename(suffix=".pkl") as fname: - with open(fname, "wb") as fo: - pickle.dump(self.metadata, fo, protocol=protocol) - with open(fname, "rb") as fi: - metadata = pickle.load(fi) + with open(fname, "wb") as fout: + pickle.dump(self.metadata, fout, protocol=protocol) + with open(fname, "rb") as fin: + metadata = pickle.load(fin) self.assertEqual(metadata, self.metadata) diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 840f65db01..db1759c5fc 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -825,20 +825,6 @@ def setUp(self): ), ] - def _copy(self, items): - # Due to a bug in python 3.6.x, performing a deepcopy of a mock.sentinel - # will yield an object that is not equivalent to its parent, so this - # is a work-around until we drop support for python 3.6.x. - import sys - - version = sys.version_info - major, minor = version.major, version.minor - result = deepcopy(items) - if major == 3 and minor <= 6: - for i, item in enumerate(items): - result[i] = result[i]._replace(metadata=item.metadata) - return result - def test_no_mapping(self): result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) self.assertEqual(dict(), result) @@ -852,7 +838,7 @@ def test_full_mapping(self): def test_transpose_mapping(self): self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) + items = deepcopy(self.items) items[0].dims[0] = 2 items[2].dims[0] = 0 self.tgt_coverage.common_items_aux.extend(items) @@ -863,7 +849,7 @@ def test_transpose_mapping(self): def test_partial_mapping__transposed(self): _ = self.items.pop(1) self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) + items = deepcopy(self.items) items[0].dims[0] = 2 items[1].dims[0] = 0 self.tgt_coverage.common_items_aux.extend(items) @@ -872,7 +858,7 @@ def test_partial_mapping__transposed(self): self.assertEqual(expected, result) def test_mapping__match_multiple_src_metadata(self): - items = self._copy(self.items) + items = deepcopy(self.items) _ = self.items.pop(1) self.src_coverage.common_items_aux.extend(self.items) items[1] = items[0] @@ -882,7 +868,7 @@ def test_mapping__match_multiple_src_metadata(self): self.assertEqual(expected, result) def test_mapping__skip_match_multiple_src_metadata(self): - items = self._copy(self.items) + items = deepcopy(self.items) _ = self.items.pop(1) self.tgt_coverage.common_items_aux.extend(self.items) items[1] = items[0]._replace(dims=[1]) @@ -892,7 +878,7 @@ def test_mapping__skip_match_multiple_src_metadata(self): self.assertEqual(expected, result) def test_mapping__skip_different_rank(self): - items = self._copy(self.items) + items = deepcopy(self.items) self.src_coverage.common_items_aux.extend(self.items) items[2] = items[2]._replace(dims=[1, 2]) self.tgt_coverage.common_items_aux.extend(items) @@ 
-902,7 +888,7 @@ def test_mapping__skip_different_rank(self): def test_bad_metadata_mapping(self): self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) + items = deepcopy(self.items) items[0] = items[0]._replace(metadata=sentinel.bad) self.tgt_coverage.common_items_aux.extend(items) emsg = "Failed to map common aux coordinate metadata" diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 96d13d7d15..a4243dfbbc 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -15,6 +15,7 @@ from iris._concatenate import concatenate from iris._lazy_data import as_lazy_data +from iris.aux_factory import HybridHeightFactory import iris.coords import iris.cube from iris.exceptions import ConcatenateError @@ -90,16 +91,26 @@ def setUp(self): iris.coords.AuxCoord([0, 1, 2], long_name="foo", units="1"), data_dims=(1,), ) + # Cell Measures cube.add_cell_measure( iris.coords.CellMeasure([0, 1, 2], long_name="bar", units="1"), data_dims=(1,), ) + # Ancillary Variables cube.add_ancillary_variable( iris.coords.AncillaryVariable( [0, 1, 2], long_name="baz", units="1" ), data_dims=(1,), ) + # Derived Coords + delta = iris.coords.AuxCoord(0.0, var_name="delta", units="m") + sigma = iris.coords.AuxCoord(1.0, var_name="sigma", units="1") + orog = iris.coords.AuxCoord(2.0, var_name="orog", units="m") + cube.add_aux_coord(delta, ()) + cube.add_aux_coord(sigma, ()) + cube.add_aux_coord(orog, ()) + cube.add_aux_factory(HybridHeightFactory(delta, sigma, orog)) self.cube = cube def test_definition_difference_message(self): @@ -190,6 +201,22 @@ def test_ancillary_variable_metadata_difference_message(self): with self.assertRaisesRegex(ConcatenateError, exc_regexp): _ = concatenate([cube_1, cube_2], True) + def test_derived_coord_difference_message(self): + cube_1 = self.cube + cube_2 = cube_1.copy() + cube_2.remove_aux_factory(cube_2.aux_factories[0]) + exc_regexp = "Derived coordinates differ: .* != .*" + with self.assertRaisesRegex(ConcatenateError, exc_regexp): + _ = concatenate([cube_1, cube_2], True) + + def test_derived_coord_metadata_difference_message(self): + cube_1 = self.cube + cube_2 = cube_1.copy() + cube_2.aux_factories[0].units = "km" + exc_regexp = "Derived coordinates metadata differ: .* != .*" + with self.assertRaisesRegex(ConcatenateError, exc_regexp): + _ = concatenate([cube_1, cube_2], True) + def test_ndim_difference_message(self): cube_1 = self.cube cube_2 = iris.cube.Cube( diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index 81370bd0de..d191993d51 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -31,7 +31,7 @@ def assert_raises_on_comparison(self, cell, other, exception_type, regexp): cell >= other def test_PartialDateTime_bounded_cell(self): - # Check that bounded comparisions to a PartialDateTime + # Check that bounded comparisons to a PartialDateTime # raise an exception. These are not supported as they # depend on the calendar. 
dt = PartialDateTime(month=6) diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py index b10fd41834..21b309a32b 100644 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -22,7 +22,7 @@ def setUp(self): def _check(self, token, coord, default=False): result = CellMethod(self.method, coords=coord) token = token if not default else BaseMetadata.DEFAULT_NAME - expected = "{}: {}".format(self.method, token) + expected = "{}: {}".format(token, self.method) self.assertEqual(str(result), expected) def test_coord_standard_name(self): @@ -64,27 +64,27 @@ def test_coord_stash_default(self): def test_string(self): token = "air_temperature" result = CellMethod(self.method, coords=token) - expected = "{}: {}".format(self.method, token) + expected = "{}: {}".format(token, self.method) self.assertEqual(str(result), expected) def test_string_default(self): token = "air temperature" # includes space result = CellMethod(self.method, coords=token) - expected = "{}: unknown".format(self.method) + expected = "unknown: {}".format(self.method) self.assertEqual(str(result), expected) def test_mixture(self): token = "air_temperature" coord = AuxCoord(1, standard_name=token) result = CellMethod(self.method, coords=[coord, token]) - expected = "{}: {}, {}".format(self.method, token, token) + expected = "{}: {}: {}".format(token, token, self.method) self.assertEqual(str(result), expected) def test_mixture_default(self): token = "air temperature" # includes space coord = AuxCoord(1, long_name=token) result = CellMethod(self.method, coords=[coord, token]) - expected = "{}: unknown, unknown".format(self.method) + expected = "unknown: unknown: {}".format(self.method) self.assertEqual(str(result), expected) diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 72a48437ec..69b6b70c96 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -708,7 +708,7 @@ def test_1d_discontiguous(self): coord = DimCoord([10, 20, 40], bounds=[[5, 15], [15, 25], [35, 45]]) contiguous, diffs = coord._discontiguity_in_bounds() self.assertFalse(contiguous) - self.assertArrayEqual(diffs, np.array([0, 10])) + self.assertArrayEqual(diffs, np.array([False, True])) def test_1d_one_cell(self): # Test a 1D coord with a single cell. 
diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 8e9e00dce8..aa9e3b51b1 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -20,7 +20,7 @@ from iris._lazy_data import as_lazy_data import iris.analysis -from iris.analysis import MEAN, Aggregator, WeightedAggregator +from iris.analysis import MEAN, SUM, Aggregator, WeightedAggregator import iris.aux_factory from iris.aux_factory import HybridHeightFactory from iris.common.metadata import BaseMetadata @@ -320,18 +320,36 @@ def test_dim0_lazy(self): self.assertArrayAlmostEqual(cube_collapsed.data, [1.5, 2.5, 3.5]) self.assertFalse(cube_collapsed.has_lazy_data()) + def test_dim0_lazy_weights_none(self): + cube_collapsed = self.cube.collapsed("y", MEAN, weights=None) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAlmostEqual(cube_collapsed.data, [1.5, 2.5, 3.5]) + self.assertFalse(cube_collapsed.has_lazy_data()) + def test_dim1_lazy(self): cube_collapsed = self.cube.collapsed("x", MEAN) self.assertTrue(cube_collapsed.has_lazy_data()) self.assertArrayAlmostEqual(cube_collapsed.data, [1.0, 4.0]) self.assertFalse(cube_collapsed.has_lazy_data()) + def test_dim1_lazy_weights_none(self): + cube_collapsed = self.cube.collapsed("x", MEAN, weights=None) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAlmostEqual(cube_collapsed.data, [1.0, 4.0]) + self.assertFalse(cube_collapsed.has_lazy_data()) + def test_multidims(self): # Check that MEAN works with multiple dims. cube_collapsed = self.cube.collapsed(("x", "y"), MEAN) self.assertTrue(cube_collapsed.has_lazy_data()) self.assertArrayAllClose(cube_collapsed.data, 2.5) + def test_multidims_weights_none(self): + # Check that MEAN works with multiple dims. + cube_collapsed = self.cube.collapsed(("x", "y"), MEAN, weights=None) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAllClose(cube_collapsed.data, 2.5) + def test_non_lazy_aggregator(self): # An aggregator which doesn't have a lazy function should still work. dummy_agg = Aggregator( @@ -342,18 +360,19 @@ def test_non_lazy_aggregator(self): self.assertArrayEqual(result.data, np.mean(self.data, axis=1)) -class Test_collapsed__multidim_weighted(tests.IrisTest): +class Test_collapsed__multidim_weighted_with_arr(tests.IrisTest): def setUp(self): self.data = np.arange(6.0).reshape((2, 3)) self.lazydata = as_lazy_data(self.data) - # Test cubes wth (same-valued) real and lazy data - cube_real = Cube(self.data) + # Test cubes with (same-valued) real and lazy data + cube_real = Cube(self.data, units="m") for i_dim, name in enumerate(("y", "x")): npts = cube_real.shape[i_dim] coord = DimCoord(np.arange(npts), long_name=name) cube_real.add_dim_coord(coord, i_dim) self.cube_real = cube_real self.cube_lazy = cube_real.copy(data=self.lazydata) + self.cube_lazy.units = "kg" # Test weights and expected result for a y-collapse self.y_weights = np.array([0.3, 0.5]) self.full_weights_y = np.broadcast_to( @@ -375,6 +394,7 @@ def test_weighted_fullweights_real_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) + self.assertEqual(cube_collapsed.units, "m") def test_weighted_fullweights_lazy_y(self): # Full-shape weights, lazy data : Check lazy result, same values as real calc. 
@@ -385,6 +405,7 @@ def test_weighted_fullweights_lazy_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) + self.assertEqual(cube_collapsed.units, "kg") def test_weighted_1dweights_real_y(self): # 1-D weights, real data : Check same results as full-shape. @@ -394,6 +415,7 @@ def test_weighted_1dweights_real_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) + self.assertEqual(cube_collapsed.units, "m") def test_weighted_1dweights_lazy_y(self): # 1-D weights, lazy data : Check lazy result, same values as real calc. @@ -404,6 +426,7 @@ def test_weighted_1dweights_lazy_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) + self.assertEqual(cube_collapsed.units, "kg") def test_weighted_fullweights_real_x(self): # Full weights, real data, ** collapse X ** : as for 'y' case above @@ -413,6 +436,7 @@ def test_weighted_fullweights_real_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) + self.assertEqual(cube_collapsed.units, "m") def test_weighted_fullweights_lazy_x(self): # Full weights, lazy data, ** collapse X ** : as for 'y' case above @@ -423,6 +447,7 @@ def test_weighted_fullweights_lazy_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) + self.assertEqual(cube_collapsed.units, "kg") def test_weighted_1dweights_real_x(self): # 1-D weights, real data, ** collapse X ** : as for 'y' case above @@ -432,6 +457,7 @@ def test_weighted_1dweights_real_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) + self.assertEqual(cube_collapsed.units, "m") def test_weighted_1dweights_lazy_x(self): # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above @@ -442,6 +468,148 @@ def test_weighted_1dweights_lazy_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) + self.assertEqual(cube_collapsed.units, "kg") + + def test_weighted_sum_fullweights_adapt_units_real_y(self): + # Check that units are adapted correctly ('m' * '1' = 'm') + cube_collapsed = self.cube_real.collapsed( + "y", SUM, weights=self.full_weights_y + ) + self.assertEqual(cube_collapsed.units, "m") + + def test_weighted_sum_fullweights_adapt_units_lazy_y(self): + # Check that units are adapted correctly ('kg' * '1' = 'kg') + cube_collapsed = self.cube_lazy.collapsed( + "y", SUM, weights=self.full_weights_y + ) + self.assertEqual(cube_collapsed.units, "kg") + + def test_weighted_sum_1dweights_adapt_units_real_y(self): + # Check that units are adapted correctly ('m' * '1' = 'm') + # Note: the same test with lazy data fails: + # https://github.com/SciTools/iris/issues/5083 + cube_collapsed = self.cube_real.collapsed( + "y", SUM, weights=self.y_weights + ) + self.assertEqual(cube_collapsed.units, "m") + + def test_weighted_sum_with_unknown_units_real_y(self): + # Check that units are adapted correctly ('unknown' * '1' = 'unknown') + # Note: does not need to be adapted in subclasses since 'unknown' + # multiplied by any unit is 'unknown' + self.cube_real.units = "unknown" + cube_collapsed = self.cube_real.collapsed( + "y", + SUM, + weights=self.full_weights_y, + ) + self.assertEqual(cube_collapsed.units, "unknown") + + def test_weighted_sum_with_unknown_units_lazy_y(self): + # Check that units are adapted correctly ('unknown' * '1' = 'unknown') + # Note: does not need to be adapted in subclasses since 'unknown' + # multiplied by any unit is 'unknown' + self.cube_lazy.units = "unknown" + cube_collapsed = 
self.cube_lazy.collapsed( + "y", + SUM, + weights=self.full_weights_y, + ) + self.assertEqual(cube_collapsed.units, "unknown") + + +# Simply redo the tests of Test_collapsed__multidim_weighted_with_arr with +# other allowed objects for weights + + +class Test_collapsed__multidim_weighted_with_cube( + Test_collapsed__multidim_weighted_with_arr +): + def setUp(self): + super().setUp() + + self.y_weights_original = self.y_weights + self.full_weights_y_original = self.full_weights_y + self.x_weights_original = self.x_weights + self.full_weights_x_original = self.full_weights_x + + self.y_weights = self.cube_real[:, 0].copy(self.y_weights_original) + self.y_weights.units = "m2" + self.full_weights_y = self.cube_real.copy(self.full_weights_y_original) + self.full_weights_y.units = "m2" + self.x_weights = self.cube_real[0, :].copy(self.x_weights_original) + self.full_weights_x = self.cube_real.copy(self.full_weights_x_original) + + def test_weighted_sum_fullweights_adapt_units_real_y(self): + # Check that units are adapted correctly ('m' * 'm2' = 'm3') + cube_collapsed = self.cube_real.collapsed( + "y", SUM, weights=self.full_weights_y + ) + self.assertEqual(cube_collapsed.units, "m3") + + def test_weighted_sum_fullweights_adapt_units_lazy_y(self): + # Check that units are adapted correctly ('kg' * 'm2' = 'kg m2') + cube_collapsed = self.cube_lazy.collapsed( + "y", SUM, weights=self.full_weights_y + ) + self.assertEqual(cube_collapsed.units, "kg m2") + + def test_weighted_sum_1dweights_adapt_units_real_y(self): + # Check that units are adapted correctly ('m' * 'm2' = 'm3') + # Note: the same test with lazy data fails: + # https://github.com/SciTools/iris/issues/5083 + cube_collapsed = self.cube_real.collapsed( + "y", SUM, weights=self.y_weights + ) + self.assertEqual(cube_collapsed.units, "m3") + + +class Test_collapsed__multidim_weighted_with_str( + Test_collapsed__multidim_weighted_with_cube +): + def setUp(self): + super().setUp() + + self.full_weights_y = "full_y" + self.full_weights_x = "full_x" + self.y_weights = "y" + self.x_weights = "1d_x" + + self.dim_metadata_full_y = iris.coords.CellMeasure( + self.full_weights_y_original, + long_name=self.full_weights_y, + units="m2", + ) + self.dim_metadata_full_x = iris.coords.AuxCoord( + self.full_weights_x_original, + long_name=self.full_weights_x, + units="m2", + ) + self.dim_metadata_1d_y = iris.coords.DimCoord( + self.y_weights_original, long_name=self.y_weights, units="m2" + ) + self.dim_metadata_1d_x = iris.coords.AncillaryVariable( + self.x_weights_original, long_name=self.x_weights, units="m2" + ) + + for cube in (self.cube_real, self.cube_lazy): + cube.add_cell_measure(self.dim_metadata_full_y, (0, 1)) + cube.add_aux_coord(self.dim_metadata_full_x, (0, 1)) + cube.remove_coord("y") + cube.add_dim_coord(self.dim_metadata_1d_y, 0) + cube.add_ancillary_variable(self.dim_metadata_1d_x, 1) + + +class Test_collapsed__multidim_weighted_with_dim_metadata( + Test_collapsed__multidim_weighted_with_str +): + def setUp(self): + super().setUp() + + self.full_weights_y = self.dim_metadata_full_y + self.full_weights_x = self.dim_metadata_full_x + self.y_weights = self.dim_metadata_1d_y + self.x_weights = self.dim_metadata_1d_x class Test_collapsed__cellmeasure_ancils(tests.IrisTest): @@ -501,7 +669,7 @@ def _assert_warn_collapse_without_weight(self, coords, warn): self.assertIn(mock.call(msg.format(coord)), warn.call_args_list) def _assert_nowarn_collapse_without_weight(self, coords, warn): - # Ensure that warning is not rised. 
+ # Ensure that warning is not raised. msg = "Collapsing spatial coordinate {!r} without weighting" for coord in coords: self.assertNotIn(mock.call(msg.format(coord)), warn.call_args_list) @@ -590,7 +758,7 @@ def _assert_warn_cannot_check_contiguity(self, warn): self.assertIn(mock.call(msg), warn.call_args_list) def _assert_cube_as_expected(self, cube): - """Ensure that cube data and coordiantes are as expected.""" + """Ensure that cube data and coordinates are as expected.""" self.assertArrayEqual(cube.data, np.array(3)) lat = cube.coord("latitude") @@ -604,16 +772,14 @@ def _assert_cube_as_expected(self, cube): def test_collapsed_lat_with_3_bounds(self): """Collapse latitude with 3 bounds.""" with mock.patch("warnings.warn") as warn: - collapsed_cube = self.cube.collapsed("latitude", iris.analysis.SUM) + collapsed_cube = self.cube.collapsed("latitude", SUM) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) def test_collapsed_lon_with_3_bounds(self): """Collapse longitude with 3 bounds.""" with mock.patch("warnings.warn") as warn: - collapsed_cube = self.cube.collapsed( - "longitude", iris.analysis.SUM - ) + collapsed_cube = self.cube.collapsed("longitude", SUM) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) @@ -621,7 +787,7 @@ def test_collapsed_lat_lon_with_3_bounds(self): """Collapse latitude and longitude with 3 bounds.""" with mock.patch("warnings.warn") as warn: collapsed_cube = self.cube.collapsed( - ["latitude", "longitude"], iris.analysis.SUM + ["latitude", "longitude"], SUM ) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) @@ -741,9 +907,9 @@ def test_different_array_attrs_incompatible(self): class Test_rolling_window(tests.IrisTest): def setUp(self): - self.cube = Cube(np.arange(6)) + self.cube = Cube(np.arange(6), units="kg") self.multi_dim_cube = Cube(np.arange(36).reshape(6, 6)) - val_coord = DimCoord([0, 1, 2, 3, 4, 5], long_name="val") + val_coord = DimCoord([0, 1, 2, 3, 4, 5], long_name="val", units="s") month_coord = AuxCoord( ["jan", "feb", "mar", "apr", "may", "jun"], long_name="month" ) @@ -770,6 +936,7 @@ def test_string_coord(self): np.array([1, 2, 3, 4]), bounds=np.array([[0, 2], [1, 3], [2, 4], [3, 5]]), long_name="val", + units="s", ) month_coord = AuxCoord( np.array( @@ -818,6 +985,30 @@ def test_ancillary_variables_and_cell_measures_removed(self): self.assertEqual(res_cube.ancillary_variables(), []) self.assertEqual(res_cube.cell_measures(), []) + def test_weights_arr(self): + weights = [0, 0, 1, 0, 2] + res_cube = self.cube.rolling_window("val", SUM, 5, weights=weights) + np.testing.assert_array_equal(res_cube.data, [10, 13]) + self.assertEqual(res_cube.units, "kg") + + def test_weights_cube(self): + weights = Cube([0, 0, 1, 0, 2], units="m2") + res_cube = self.cube.rolling_window("val", SUM, 5, weights=weights) + np.testing.assert_array_equal(res_cube.data, [10, 13]) + self.assertEqual(res_cube.units, "kg m2") + + def test_weights_str(self): + weights = "val" + res_cube = self.cube.rolling_window("val", SUM, 6, weights=weights) + np.testing.assert_array_equal(res_cube.data, [55]) + self.assertEqual(res_cube.units, "kg s") + + def test_weights_dim_coord(self): + weights = self.cube.coord("val") + res_cube = self.cube.rolling_window("val", SUM, 6, weights=weights) + np.testing.assert_array_equal(res_cube.data, [55]) + self.assertEqual(res_cube.units, "kg s") + class Test_slices_dim_order(tests.IrisTest): """ @@ -905,7 +1096,7 
@@ def setUp(self): len(self.cube.coord("model_level_number").points) ) self.exp_iter_2d = np.ndindex(6, 70, 1, 1) - # Define maximum number of interations for particularly long + # Define maximum number of iterations for particularly long # (and so time-consuming) iterators. self.long_iterator_max = 5 diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 9e60631c33..c591e45f63 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -67,9 +67,7 @@ def setUp(self): self.mock_agg = mock.Mock(spec=Aggregator) self.mock_agg.cell_method = [] - self.mock_agg.aggregate = mock.Mock( - return_value=mock.Mock(dtype="object") - ) + self.mock_agg.aggregate = mock.Mock(return_value=np.arange(4)) self.mock_agg.aggregate_shape = mock.Mock(return_value=()) self.mock_agg.lazy_func = None self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y, z: x) @@ -79,8 +77,8 @@ def setUp(self): def mock_weighted_aggregate(*_, **kwargs): if kwargs.get("returned", False): - return (mock.Mock(dtype="object"), mock.Mock(dtype="object")) - return mock.Mock(dtype="object") + return (np.arange(11), np.ones(11)) + return np.arange(4) self.mock_weighted_agg.aggregate = mock.Mock( side_effect=mock_weighted_aggregate diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py index e44aee730a..d9de814b05 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py @@ -94,7 +94,10 @@ def setUp(self): # Restrict the CFUGridReader functionality to only performing # translations and building first level cf-groups for variables. self.patch("iris.experimental.ugrid.cf.CFUGridReader._reset") - self.patch("netCDF4.Dataset", return_value=self.dataset) + self.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, + ) cf_reader = CFUGridReader("dummy") self.cf_group = cf_reader.cf_group diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 9a81c79d44..f343f4be24 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -9,10 +9,12 @@ # importing anything else.
import iris.tests as tests # isort:skip +from platform import python_version from xml.dom import minidom import numpy as np from numpy import ma +from pkg_resources import parse_version from iris._lazy_data import as_lazy_data, is_lazy_data from iris.experimental.ugrid.mesh import Connectivity @@ -61,10 +63,14 @@ def test_indices(self): def test_read_only(self): attributes = ("indices", "cf_role", "start_index", "location_axis") + if parse_version(python_version()) >= parse_version("3.11"): + msg = "object has no setter" + else: + msg = "can't set attribute" for attribute in attributes: self.assertRaisesRegex( AttributeError, - "can't set attribute", + msg, setattr, self.connectivity, attribute, diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index 03e2793fd9..cb90c176b6 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -11,11 +11,13 @@ # importing anything else. import iris.tests as tests # isort:skip +from platform import python_version import re import unittest.mock as mock import dask.array as da import numpy as np +from pkg_resources import parse_version import pytest from iris._lazy_data import as_lazy_data, is_lazy_data @@ -77,8 +79,12 @@ def setUp(self): def test_fixed_metadata(self): # Check that you cannot set any of these on an existing MeshCoord. meshcoord = self.meshcoord + if parse_version(python_version()) >= parse_version("3.11"): + msg = "object has no setter" + else: + msg = "can't set attribute" for prop in ("mesh", "location", "axis"): - with self.assertRaisesRegex(AttributeError, "can't set"): + with self.assertRaisesRegex(AttributeError, msg): setattr(meshcoord, prop, mock.sentinel.odd) def test_coord_system(self): @@ -273,7 +279,7 @@ def _expected_elements_regexp( # Printed name is standard or long -- we don't have a case with neither coord_name = standard_name or long_name # Construct regexp in 'sections' - # NB each consumes upto first non-space in the next line + # NB each consumes up to first non-space in the next line regexp = f"MeshCoord : {coord_name} / [^\n]+\n *" regexp += r"mesh: \\n *" regexp += f"location: '{location}'\n *" diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index dee28e98cc..9e5cf9b7a5 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -70,7 +70,10 @@ def setUp(self): ) def test_create_global_attributes(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): + with mock.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, + ): global_attrs = CFReader("dummy").cf_group.global_attributes self.assertEqual( global_attrs["dimensions"], "something something_else" @@ -145,7 +148,10 @@ def setUp(self): self.addCleanup(reset_patch.stop) def test_create_formula_terms(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): + with mock.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, + ): cf_group = CFReader("dummy").cf_group self.assertEqual(len(cf_group), len(self.variables)) # Check there is a singular data variable. 
@@ -247,7 +253,10 @@ def setUp(self): self.addCleanup(patcher.stop) def test_associate_formula_terms_with_data_variable(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): + with mock.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, + ): cf_group = CFReader("dummy").cf_group self.assertEqual(len(cf_group), len(self.variables)) # Check the cf-group associated with the data variable. @@ -296,7 +305,10 @@ def test_associate_formula_terms_with_data_variable(self): ) def test_promote_reference(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): + with mock.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, + ): cf_group = CFReader("dummy").cf_group self.assertEqual(len(cf_group), len(self.variables)) # Check the number of data variables. @@ -316,7 +328,8 @@ def test_promote_reference(self): def test_formula_terms_ignore(self): self.orography.dimensions = ["lat", "wibble"] with mock.patch( - "netCDF4.Dataset", return_value=self.dataset + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, ), mock.patch("warnings.warn") as warn: cf_group = CFReader("dummy").cf_group group = cf_group.promoted @@ -327,7 +340,8 @@ def test_formula_terms_ignore(self): def test_auxiliary_ignore(self): self.x.dimensions = ["lat", "wibble"] with mock.patch( - "netCDF4.Dataset", return_value=self.dataset + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, ), mock.patch("warnings.warn") as warn: cf_group = CFReader("dummy").cf_group promoted = ["x", "orography"] @@ -342,7 +356,8 @@ def test_promoted_auxiliary_ignore(self): self.variables["wibble"] = self.wibble self.orography.coordinates = "wibble" with mock.patch( - "netCDF4.Dataset", return_value=self.dataset + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=self.dataset, ), mock.patch("warnings.warn") as warn: cf_group = CFReader("dummy").cf_group.promoted promoted = ["wibble", "orography"] diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py index 7ce66c3fef..078f65d572 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py @@ -205,7 +205,7 @@ def test_height_above_sea_level(self): self.assertEqual(com, res) def test_malformed_height_above_ground(self): - # Parse height above ground level, with additonal stuff at the string + # Parse height above ground level, with additional stuff at the string # end (agl). data = "Z = 50.00000 m agl and stuff" res = _cf_height_from_name(data) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 0cc3d09426..399a987f11 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -80,42 +80,44 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # Simulate the inner part of the file reading process. cf = CFReader(nc_path) - # Grab a data variable : FOR NOW always grab the 'phenom' variable. - cf_var = cf.cf_group.data_variables["phenom"] - - engine = iris.fileformats.netcdf.loader._actions_engine() - - # If debug enabled, switch on the activation summary debug output. 
- # Use 'patch' so it is restored after the test. - self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug) - - with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", - message="Ignoring a datum in netCDF load for consistency with existing " - "behaviour. In a future version of Iris, this datum will be " - "applied. To apply the datum when loading, use the " - "iris.FUTURE.datum_support flag.", - category=FutureWarning, - ) - # Call the main translation function to load a single cube. - # _load_cube establishes per-cube facts, activates rules and - # produces an actual cube. - cube = _load_cube(engine, cf, cf_var, nc_path) - - # Also Record, on the cubes, which hybrid coord elements were identified - # by the rules operation. - # Unlike the other translations, _load_cube does *not* convert this - # information into actual cube elements. That is instead done by - # `iris.fileformats.netcdf._load_aux_factory`. - # For rules testing, it is anyway more convenient to deal with the raw - # data, as each factory type has different validity requirements to - # build it, and none of that is relevant to the rules operation. - cube._formula_type_name = engine.requires.get("formula_type") - cube._formula_terms_byname = engine.requires.get("formula_terms") - - # Always returns a single cube. - return cube + + with cf: + # Grab a data variable : FOR NOW always grab the 'phenom' variable. + cf_var = cf.cf_group.data_variables["phenom"] + + engine = iris.fileformats.netcdf.loader._actions_engine() + + # If debug enabled, switch on the activation summary debug output. + # Use 'patch' so it is restored after the test. + self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug) + + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="Ignoring a datum in netCDF load for consistency with existing " + "behaviour. In a future version of Iris, this datum will be " + "applied. To apply the datum when loading, use the " + "iris.FUTURE.datum_support flag.", + category=FutureWarning, + ) + # Call the main translation function to load a single cube. + # _load_cube establishes per-cube facts, activates rules and + # produces an actual cube. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Also Record, on the cubes, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") + + # Always returns a single cube. + return cube def run_testcase(self, warning_regex=None, **testcase_kwargs): """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index a367e7709c..99a1b66ae4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -53,7 +53,7 @@ def _make_testcase_cdl( # we can mimic a missing grid-mapping by changing the varname from that # which the data-variable refers to, with "gridmapvar_name=xxx". 
# Likewise, an invalid (unrecognised) grid-mapping can be mimicked by - # selecting an unkown 'grid_mapping_name' property, with + # selecting an unknown 'grid_mapping_name' property, with # "gridmapvar_mappropertyname=xxx". if mapping_type_name is None: # Default grid-mapping and coords are standard lat-lon. @@ -463,7 +463,7 @@ def test_mapping_rotated(self): # All non-latlon coordinate systems ... # These all have projection-x/y coordinates with units of metres. # They all work the same way. - # NOTE: various mapping types *require* certain addtional properties + # NOTE: various mapping types *require* certain additional properties # - without which an error will occur during translation. # - run_testcase/_make_testcase_cdl know how to provide these # diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py index b057a41a3e..87070e00ba 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py @@ -15,6 +15,7 @@ from iris.exceptions import CannotAddError from iris.fileformats._nc_load_rules.helpers import build_ancil_var +from iris.fileformats.netcdf import _thread_safe_nc as threadsafe_nc @pytest.fixture @@ -31,6 +32,7 @@ def mock_engine(): def mock_cf_av_var(monkeypatch): data = np.arange(6) output = mock.Mock( + spec=threadsafe_nc.VariableWrapper, dimensions=("foo",), scale_factor=1, add_offset=0, @@ -40,6 +42,7 @@ def mock_cf_av_var(monkeypatch): long_name="wibble", units="m2", shape=data.shape, + size=np.prod(data.shape), dtype=data.dtype, __getitem__=lambda self, key: data[key], ) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index 13622b72e2..369f92f238 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -8,11 +8,11 @@ build_auxilliary_coordinate`. 
""" - # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests # isort:skip +import contextlib from unittest import mock import numpy as np @@ -22,6 +22,7 @@ from iris.exceptions import CannotAddError from iris.fileformats._nc_load_rules.helpers import build_auxiliary_coordinate from iris.fileformats.cf import CFVariable +from iris.fileformats.netcdf import _thread_safe_nc as threadsafe_nc class TestBoundsVertexDim(tests.IrisTest): @@ -48,6 +49,7 @@ def setUp(self): long_name="wibble", units="m", shape=points.shape, + size=np.prod(points.shape), dtype=points.dtype, __getitem__=lambda self, key: points[key], ) @@ -111,6 +113,7 @@ def _make_cf_bounds_var(self, dimension_names): cf_name="wibble_bnds", cf_data=cf_data, shape=bounds.shape, + size=np.prod(bounds.shape), dtype=bounds.dtype, __getitem__=lambda self, key: bounds[key], ) @@ -165,6 +168,7 @@ def setUp(self): long_name="wibble", units="m", shape=points.shape, + size=np.prod(points.shape), dtype=points.dtype, __getitem__=lambda self, key: points[key], ) @@ -176,21 +180,29 @@ def setUp(self): cube_parts=dict(coordinates=[]), ) + @contextlib.contextmanager + def deferred_load_patch(self): def patched__getitem__(proxy_self, keys): if proxy_self.variable_name == self.cf_coord_var.cf_name: return self.cf_coord_var[keys] raise RuntimeError() - self.deferred_load_patch = mock.patch( + # Fix for deferred load, *AND* avoid loading small variable data in real arrays. + with mock.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, - ) + ): + # While loading, "turn off" loading small variables as real data. + with mock.patch( + "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 + ): + yield def test_scale_factor_add_offset_int(self): self.cf_coord_var.scale_factor = 3 self.cf_coord_var.add_offset = 5 - with self.deferred_load_patch: + with self.deferred_load_patch(): build_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] @@ -199,7 +211,7 @@ def test_scale_factor_add_offset_int(self): def test_scale_factor_float(self): self.cf_coord_var.scale_factor = 3.0 - with self.deferred_load_patch: + with self.deferred_load_patch(): build_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] @@ -208,7 +220,7 @@ def test_scale_factor_float(self): def test_add_offset_float(self): self.cf_coord_var.add_offset = 5.0 - with self.deferred_load_patch: + with self.deferred_load_patch(): build_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] @@ -227,6 +239,7 @@ def setUp(self): points = np.arange(6) self.cf_coord_var = mock.Mock( + spec=threadsafe_nc.VariableWrapper, dimensions=("foo",), scale_factor=1, add_offset=0, @@ -239,18 +252,21 @@ def setUp(self): units="days since 1970-01-01", calendar=None, shape=points.shape, + size=np.prod(points.shape), dtype=points.dtype, __getitem__=lambda self, key: points[key], ) bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( + spec=threadsafe_nc.VariableWrapper, dimensions=("x", "nv"), scale_factor=1, add_offset=0, cf_name="wibble_bnds", cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), shape=bounds.shape, + size=np.prod(bounds.shape), dtype=bounds.dtype, __getitem__=lambda self, key: bounds[key], ) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py index efbb0649c9..d0421186b4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py @@ -15,6 +15,7 @@ from iris.exceptions import CannotAddError from iris.fileformats._nc_load_rules.helpers import build_cell_measures +from iris.fileformats.netcdf import _thread_safe_nc as threadsafe_nc @pytest.fixture @@ -31,6 +32,7 @@ def mock_engine(): def mock_cf_cm_var(monkeypatch): data = np.arange(6) output = mock.Mock( + spec=threadsafe_nc.VariableWrapper, dimensions=("foo",), scale_factor=1, add_offset=0, @@ -40,6 +42,7 @@ def mock_cf_cm_var(monkeypatch): long_name="wibble", units="m2", shape=data.shape, + size=np.prod(data.shape), dtype=data.dtype, __getitem__=lambda self, key: data[key], cf_measure="area", diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index bbde2d0a2d..729a2d8b14 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -15,7 +15,7 @@ from unittest import mock from iris.coords import CellMethod -from iris.fileformats.netcdf import parse_cell_methods +from iris.fileformats._nc_load_rules.helpers import parse_cell_methods class Test(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index 054c8e2db1..6c487d74e7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -4,7 +4,6 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """Unit tests for the `iris.fileformats.netcdf._get_cf_var_data` function.""" - # Import iris.tests first so that some things can be initialised before # importing anything else. 
import iris.tests as tests # isort:skip @@ -25,16 +24,27 @@ def setUp(self): self.shape = (300000, 240, 200) self.expected_chunks = _optimum_chunksize(self.shape, self.shape) - def _make(self, chunksizes): - cf_data = mock.Mock(_FillValue=None) + def _make( + self, chunksizes=None, shape=None, dtype="i4", **extra_properties + ): + cf_data = mock.MagicMock( + _FillValue=None, + __getitem__="", + ) cf_data.chunking = mock.MagicMock(return_value=chunksizes) + if shape is None: + shape = self.shape + dtype = np.dtype(dtype) cf_var = mock.MagicMock( spec=iris.fileformats.cf.CFVariable, - dtype=np.dtype("i4"), + dtype=dtype, cf_data=cf_data, cf_name="DUMMY_VAR", - shape=self.shape, + shape=shape, + size=np.prod(shape), + **extra_properties, ) + cf_var.__getitem__.return_value = mock.sentinel.real_data_accessed return cf_var def test_cf_data_type(self): @@ -68,6 +78,30 @@ def test_cf_data_contiguous(self): lazy_data_chunks = [c[0] for c in lazy_data.chunks] self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) + def test_type__1kf8_is_lazy(self): + cf_var = self._make(shape=(1000,), dtype="f8") + var_data = _get_cf_var_data(cf_var, self.filename) + self.assertIsInstance(var_data, dask_array) + + def test_arraytype__1ki2_is_real(self): + cf_var = self._make(shape=(1000,), dtype="i2") + var_data = _get_cf_var_data(cf_var, self.filename) + self.assertIs(var_data, mock.sentinel.real_data_accessed) + + def test_arraytype__100f8_is_real(self): + cf_var = self._make(shape=(100,), dtype="f8") + var_data = _get_cf_var_data(cf_var, self.filename) + self.assertIs(var_data, mock.sentinel.real_data_accessed) + + def test_cf_data_emulation(self): + # Check that a variable emulation object passes its real data directly. + emulated_data = mock.Mock() + # Make a cf_var with a special extra '_data_array' property. + cf_var = self._make(chunksizes=None, _data_array=emulated_data) + result = _get_cf_var_data(cf_var, self.filename) + # This should get directly returned. 
+ self.assertIs(emulated_data, result) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py index 6e28a2f8e4..b67c546aa0 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py @@ -60,6 +60,7 @@ def _make(self, names, attrs): cf_name="DUMMY_VAR", cf_group=coords, shape=shape, + size=np.prod(shape), ) return cf, cf_var @@ -139,6 +140,7 @@ def _make(self, attrs): cf_group=mock.Mock(), cf_attrs_unused=cf_attrs_unused, shape=shape, + size=np.prod(shape), ) return cf_var diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py rename to lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 39992d03a0..1a2ef1d29d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -25,7 +25,8 @@ from iris.coords import AncillaryVariable, CellMeasure from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD from iris.experimental.ugrid.mesh import MeshCoord -from iris.fileformats.netcdf import load_cubes, logger +from iris.fileformats.netcdf import logger +from iris.fileformats.netcdf.loader import load_cubes from iris.tests.stock.netcdf import ncgen_from_cdl diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py similarity index 94% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 174a46fdb7..12af318c01 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the `iris.fileformats.netcdf.Saver` class.""" +"""Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" # Import iris.tests first so that some things can be initialised before # importing anything else. @@ -13,7 +13,6 @@ from contextlib import contextmanager from unittest import mock -import netCDF4 as nc import numpy as np from numpy import ma @@ -32,7 +31,7 @@ ) from iris.coords import AuxCoord, DimCoord from iris.cube import Cube -from iris.fileformats.netcdf import Saver +from iris.fileformats.netcdf import Saver, _thread_safe_nc import iris.tests.stock as stock @@ -203,12 +202,18 @@ def test_big_endian(self): def test_zlib(self): cube = self._simple_cube(">f4") - api = self.patch("iris.fileformats.netcdf.saver.netCDF4") + api = self.patch("iris.fileformats.netcdf.saver._thread_safe_nc") # Define mocked default fill values to prevent deprecation warning (#4374). api.default_fillvals = collections.defaultdict(lambda: -99.0) - with Saver("/dummy/path", "NETCDF4") as saver: + # Mock the apparent dtype of mocked variables, to avoid an error. + ref = api.DatasetWrapper.return_value + ref = ref.createVariable.return_value + ref.dtype = np.dtype(np.float32) + # NOTE: use compute=False as otherwise it gets in a pickle trying to construct + # a fill-value report on a non-compliant variable in a non-file (!) 
+ with Saver("/dummy/path", "NETCDF4", compute=False) as saver: saver.write(cube, zlib=True) - dataset = api.Dataset.return_value + dataset = api.DatasetWrapper.return_value create_var_call = mock.call( "air_pressure_anomaly", np.dtype("float32"), @@ -249,7 +254,7 @@ def test_default_unlimited_dimensions(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertFalse(ds.dimensions["dim0"].isunlimited()) self.assertFalse(ds.dimensions["dim1"].isunlimited()) ds.close() @@ -259,7 +264,7 @@ def test_no_unlimited_dimensions(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=None) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) for dim in ds.dimensions.values(): self.assertFalse(dim.isunlimited()) ds.close() @@ -281,7 +286,7 @@ def test_custom_unlimited_dimensions(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=unlimited_dimensions) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) for dim in unlimited_dimensions: self.assertTrue(ds.dimensions[dim].isunlimited()) ds.close() @@ -290,7 +295,7 @@ def test_custom_unlimited_dimensions(self): coords = [cube.coord(dim) for dim in unlimited_dimensions] with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=coords) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) for dim in unlimited_dimensions: self.assertTrue(ds.dimensions[dim].isunlimited()) ds.close() @@ -301,7 +306,7 @@ def test_reserved_attributes(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) res = ds.getncattr("dimensions") ds.close() self.assertEqual(res, "something something_else") @@ -323,7 +328,7 @@ def test_dimensional_to_scalar(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) # Confirm that the only dimension is the one denoting the number # of bounds - have successfully saved the 2D bounds array into 1D. self.assertEqual(["bnds"], list(ds.dimensions.keys())) @@ -363,7 +368,7 @@ def _check_bounds_setting(self, climatological=False): saver._ensure_valid_dtype.return_value = mock.Mock( shape=coord.bounds.shape, dtype=coord.bounds.dtype ) - var = mock.MagicMock(spec=nc.Variable) + var = mock.MagicMock(spec=_thread_safe_nc.VariableWrapper) # Make the main call. 
Saver._create_cf_bounds(saver, coord, var, "time") @@ -404,7 +409,7 @@ def test_valid_range_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertArrayEqual(ds.valid_range, vrange) ds.close() @@ -416,7 +421,7 @@ def test_valid_min_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertArrayEqual(ds.valid_min, 1) ds.close() @@ -428,7 +433,7 @@ def test_valid_max_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertArrayEqual(ds.valid_max, 2) ds.close() @@ -448,7 +453,7 @@ def test_valid_range_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertArrayEqual( ds.variables["longitude"].valid_range, vrange ) @@ -462,7 +467,7 @@ def test_valid_min_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertArrayEqual(ds.variables["longitude"].valid_min, 1) ds.close() @@ -474,7 +479,7 @@ def test_valid_max_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) self.assertArrayEqual(ds.variables["longitude"].valid_max, 2) ds.close() @@ -506,7 +511,7 @@ def _netCDF_var(self, cube, **kwargs): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, **kwargs) - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) (var,) = [ var for var in ds.variables.values() @@ -572,7 +577,7 @@ def test_contains_default_fill_value(self): # Test that a warning is raised if the data contains the default fill # value if no fill_value argument is supplied. cube = self._make_cube(">f4") - cube.data[0, 0] = nc.default_fillvals["f4"] + cube.data[0, 0] = _thread_safe_nc.default_fillvals["f4"] with self.assertWarnsRegex( UserWarning, "contains unmasked data points equal to the fill-value", @@ -647,7 +652,17 @@ def setUp(self): self.container = mock.Mock(name="container", attributes={}) self.data_dtype = np.dtype("int32") - patch = mock.patch("netCDF4.Dataset") + # We need to create mock datasets which look like they are closed. + dataset_class = mock.Mock( + return_value=mock.Mock( + # Mock dataset : the isopen() call should return 0. 
+ isopen=mock.Mock(return_value=0) + ) + ) + patch = mock.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + dataset_class, + ) _ = patch.start() self.addCleanup(patch.stop) @@ -661,7 +676,7 @@ def assertAttribute(self, value): def check_attribute_compliance_call(self, value): self.set_attribute(value) - with Saver(mock.Mock(), "NETCDF4") as saver: + with Saver("nonexistent test file", "NETCDF4") as saver: saver.check_attribute_compliance(self.container, self.data_dtype) @@ -770,7 +785,7 @@ def test_valid_range_and_valid_min_valid_max_provided(self): self.container.attributes["valid_range"] = [1, 2] self.container.attributes["valid_min"] = [1] msg = 'Both "valid_range" and "valid_min"' - with Saver(mock.Mock(), "NETCDF4") as saver: + with Saver("nonexistent test file", "NETCDF4") as saver: with self.assertRaisesRegex(ValueError, msg): saver.check_attribute_compliance( self.container, self.data_dtype @@ -1050,38 +1065,5 @@ def test_geo_cs(self): self._test(coord_system, expected) -class Test__create_cf_cell_measure_variable(tests.IrisTest): - # Saving of masked data is disallowed. - - # Attribute is substituted in test_Saver__lazy. - array_lib = np - - def setUp(self): - self.cube = stock.lat_lon_cube() - self.names_map = ["latitude", "longitude"] - masked_array = self.array_lib.ma.masked_array( - [0, 1, 2], mask=[True, False, True] - ) - self.cm = iris.coords.CellMeasure(masked_array, var_name="cell_area") - self.cube.add_cell_measure(self.cm, data_dims=0) - self.exp_emsg = "Cell measures with missing data are not supported." - - def test_masked_data__insitu(self): - # Test that the error is raised in the right place. - with self.temp_filename(".nc") as nc_path: - saver = Saver(nc_path, "NETCDF4") - with self.assertRaisesRegex(ValueError, self.exp_emsg): - saver._create_generic_cf_array_var( - self.cube, self.names_map, self.cm - ) - - def test_masked_data__save_pipeline(self): - # Test that the right error is raised by the saver pipeline. 
- with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - with self.assertRaisesRegex(ValueError, self.exp_emsg): - saver.write(self.cube) - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py similarity index 95% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index eab09b9e4f..e1211dc276 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -14,7 +14,7 @@ from iris.coords import AuxCoord from iris.fileformats.netcdf import Saver from iris.tests import stock -from iris.tests.unit.fileformats.netcdf import test_Saver +from iris.tests.unit.fileformats.netcdf.saver import test_Saver class LazyMixin(tests.IrisTest): @@ -82,12 +82,6 @@ class Test_check_attribute_compliance__exception_handling( pass -class Test__create_cf_cell_measure_variable( - LazyMixin, test_Saver.Test__create_cf_cell_measure_variable -): - pass - - class TestStreamed(tests.IrisTest): def setUp(self): self.cube = stock.simple_2d() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py new file mode 100644 index 0000000000..9686c88abf --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -0,0 +1,187 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`. + +The behaviour of this method is complex, and this only tests certain aspects. +The testing of the dask delayed operations and file writing are instead covered by +integration tests. + +""" +from unittest import mock +import warnings + +import dask.array as da +import numpy as np +import pytest + +import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc +from iris.fileformats.netcdf.saver import Saver, _FillvalueCheckInfo + + +class Test__lazy_stream_data: + @staticmethod + @pytest.fixture(autouse=True) + def saver_patch(): + # Install patches, so we can create a Saver without opening a real output file. + # Mock just enough of Dataset behaviour to allow a 'Saver.complete()' call. + mock_dataset = mock.MagicMock() + mock_dataset_class = mock.Mock(return_value=mock_dataset) + # Mock the wrapper within the netcdf saver + target1 = ( + "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" + ) + # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is + # used by NetCDFDataProxy and NetCDFWriteProxy. + target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" + with mock.patch(target1, mock_dataset_class): + with mock.patch(target2, mock_dataset_class): + yield + + # A fixture to parametrise tests over delayed and non-delayed Saver type. + # NOTE: this only affects the saver context-exit, which we do not test here, so + # should make ***no difference to any of these tests***. + @staticmethod + @pytest.fixture(params=[False, True], ids=["nocompute", "compute"]) + def compute(request) -> bool: + yield request.param + + # A fixture to parametrise tests over real and lazy-type data. 
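These parametrised fixtures ('compute' above, 'data_form' just below) follow a common pytest pattern in which fixture params multiply together. A standalone sketch of that pattern, not part of the Iris suite:

import pytest


@pytest.fixture(params=[False, True], ids=["nocompute", "compute"])
def compute(request):
    return request.param


@pytest.fixture(params=["realdata", "lazydata", "emulateddata"])
def data_form(request):
    return request.param


def test_every_combination(compute, data_form):
    # Collected 2 x 3 = 6 times, once per parameter combination.
    assert compute in (False, True)
    assert data_form in ("realdata", "lazydata", "emulateddata")
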
+ @staticmethod + @pytest.fixture(params=["realdata", "lazydata", "emulateddata"]) + def data_form(request) -> bool: + yield request.param + + @staticmethod + def saver(compute) -> Saver: + # Create a test Saver object + return Saver( + filename="", netcdf_format="NETCDF4", compute=compute + ) + + @staticmethod + def mock_var(shape, with_data_array): + # Create a test cf_var object. + # N.B. using 'spec=' so we can control whether it has a '_data_array' property. + if with_data_array: + extra_properties = { + "_data_array": mock.sentinel.initial_data_array + } + else: + extra_properties = {} + mock_cfvar = mock.MagicMock( + spec=threadsafe_nc.VariableWrapper, + shape=tuple(shape), + dtype=np.dtype(np.float32), + **extra_properties, + ) + # Give the mock cf-var a name property, as required by '_lazy_stream_data'. + # This *can't* be an extra kwarg to MagicMock __init__, since that already + # defines a specific 'name' kwarg, with a different purpose. + mock_cfvar.name = "" + return mock_cfvar + + def test_data_save(self, compute, data_form): + """Real data is transferred immediately, lazy data creates a delayed write.""" + saver = self.saver(compute=compute) + + data = np.arange(5.0) + if data_form == "lazydata": + data = da.from_array(data) + + cf_var = self.mock_var( + data.shape, with_data_array=(data_form == "emulateddata") + ) + fill_value = -1.0 # not occurring in data + saver._lazy_stream_data( + data=data, fill_value=fill_value, fill_warn=True, cf_var=cf_var + ) + if data_form == "lazydata": + expect_n_setitem = 0 + expect_n_delayed = 1 + elif data_form == "realdata": + expect_n_setitem = 1 + expect_n_delayed = 0 + else: + assert data_form == "emulateddata" + expect_n_setitem = 0 + expect_n_delayed = 0 + + assert cf_var.__setitem__.call_count == expect_n_setitem + assert len(saver._delayed_writes) == expect_n_delayed + + if data_form == "lazydata": + result_data, result_writer, fill_info = saver._delayed_writes[0] + assert result_data is data + assert isinstance(result_writer, threadsafe_nc.NetCDFWriteProxy) + assert isinstance(fill_info, _FillvalueCheckInfo) + elif data_form == "realdata": + cf_var.__setitem__.assert_called_once_with(slice(None), data) + else: + assert data_form == "emulateddata" + cf_var._data_array == mock.sentinel.exact_data_array + + def test_warnings(self, compute, data_form): + """ + For real data, fill-value warnings are issued immediately. + For lazy data, warnings are returned from computing a delayed completion. + For 'emulated' data (direct array transfer), no checks + no warnings ever. + + N.B. The 'compute' keyword has **no effect** on this : It only causes delayed + writes to be automatically actioned on exiting a Saver context. + Streaming *always* creates delayed writes for lazy data, since this is required + to make dask distributed operation work. + """ + saver = self.saver(compute=compute) + + data = np.arange(5.0) + if data_form == "lazydata": + data = da.from_array(data) + + fill_value = 2.0 # IS occurring in data + cf_var = self.mock_var( + data.shape, with_data_array=(data_form == "emulateddata") + ) + + # Do initial save. When compute=True, this issues warnings + with warnings.catch_warnings(record=True) as logged_warnings: + saver._lazy_stream_data( + data=data, fill_value=fill_value, fill_warn=True, cf_var=cf_var + ) + + # Check warnings issued by initial call. 
+ issued_warnings = [log.message for log in logged_warnings] + if data_form == "lazydata": + n_expected_warnings = 0 + elif data_form == "realdata": + n_expected_warnings = 1 + else: + # No checks in the emulated case + assert data_form == "emulateddata" + n_expected_warnings = 0 + assert len(issued_warnings) == n_expected_warnings + + # Complete the write : any delayed warnings should be *returned*. + # NOTE: + # (1) this still works when there are no delayed writes. + # (2) the Saver 'compute' keyword makes no difference to this usage, as it + # *only* affects what happens when the saver context exits. + result2 = saver.delayed_completion().compute() + issued_warnings += list(result2) + + # Check warnings issued during 'completion'. + if data_form == "emulateddata": + # No checks in this case, ever. + n_expected_warnings = 0 + else: + # Otherwise, either way, a suitable warning should now have been produced. + n_expected_warnings = 1 + assert len(issued_warnings) == n_expected_warnings + if n_expected_warnings > 0: + warning = issued_warnings[0] + msg = "contains unmasked data points equal to the fill-value, 2.0" + assert isinstance(warning, UserWarning) + assert msg in warning.args[0] diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 18e86a9f57..323b498d9c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -18,7 +18,6 @@ import shutil import tempfile -import netCDF4 as nc import numpy as np from iris import save @@ -26,6 +25,7 @@ from iris.cube import Cube, CubeList from iris.experimental.ugrid.mesh import Connectivity, Mesh from iris.experimental.ugrid.save import save_mesh +from iris.fileformats.netcdf import _thread_safe_nc from iris.tests.stock import realistic_4d XY_LOCS = ("x", "y") @@ -259,7 +259,7 @@ def scan_dataset(filepath): variable's dims. """ - ds = nc.Dataset(filepath) + ds = _thread_safe_nc.DatasetWrapper(filepath) # dims dict is {name: len} dimsdict = {name: dim.size for name, dim in ds.dimensions.items()} # vars dict is {name: {attr:val}} @@ -824,7 +824,7 @@ def test_nonuniform_connectivity(self): self.assertNotIn("_FillValue", fn_props) # For what it's worth, *also* check the actual data array in the file - ds = nc.Dataset(tempfile_path) + ds = _thread_safe_nc.DatasetWrapper(tempfile_path) conn_var = ds.variables[ff_conn_name] data = conn_var[:] ds.close() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py similarity index 69% rename from lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index 77209efafc..95a518e4e5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -4,39 +4,48 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the `iris.fileformats.netcdf._FillValueMaskCheckAndStoreTarget` -class. +Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. 
+ +Note: now runs all testcases on both real + lazy data. """ # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip +import collections -from unittest import mock - +import dask.array as da import numpy as np -from iris.fileformats.netcdf.saver import _FillValueMaskCheckAndStoreTarget +from iris.fileformats.netcdf.saver import _data_fillvalue_check -class Test__FillValueMaskCheckAndStoreTarget(tests.IrisTest): +class Check__fillvalueandmasking: def _call_target(self, fill_value, keys, vals): - inner_target = mock.MagicMock() - target = _FillValueMaskCheckAndStoreTarget( - inner_target, fill_value=fill_value - ) + data = np.zeros(20, dtype=np.float32) + if any(np.ma.isMaskedArray(val) for val in vals): + # N.B. array is masked if "vals" is, but has no masked points initially. + data = np.ma.masked_array(data, mask=np.zeros_like(data)) for key, val in zip(keys, vals): - target[key] = val + data[key] = val - calls = [mock.call(key, val) for key, val in zip(keys, vals)] - inner_target.__setitem__.assert_has_calls(calls) + if hasattr(self.arraylib, "compute"): + data = da.from_array(data, chunks=-1) + + results = _data_fillvalue_check( + arraylib=self.arraylib, data=data, check_value=fill_value + ) - return target + if hasattr(results, "compute"): + results = results.compute() - def test___setitem__(self): - self._call_target(None, [1], [2]) + # Return a named tuple, for named-property access to the 2 result values. + result = collections.namedtuple("_", ["is_masked", "contains_value"])( + *results + ) + return result def test_no_fill_value_not_masked(self): # Test when the fill value is not present and the data is not masked @@ -90,3 +99,11 @@ def test_contains_masked_fill_value(self): target = self._call_target(fill_value, keys, vals) self.assertFalse(target.contains_value) self.assertTrue(target.is_masked) + + +class Test__real(Check__fillvalueandmasking, tests.IrisTest): + arraylib = np + + +class Test__lazy(Check__fillvalueandmasking, tests.IrisTest): + arraylib = da diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py new file mode 100644 index 0000000000..b2e4b63e3a --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -0,0 +1,119 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. 
+""" +import warnings + +import numpy as np +import pytest + +from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import ( + SaverFillValueWarning, + _fillvalue_report, + _FillvalueCheckInfo, +) + + +class Test__fillvaluereport: + @pytest.mark.parametrize( + "is_bytes", [True, False], ids=["ByteData", "NonbyteData"] + ) + @pytest.mark.parametrize( + "is_masked", [True, False], ids=["MaskedData", "NonmaskedData"] + ) + @pytest.mark.parametrize( + "contains_fv", [True, False], ids=["FillInData", "NofillInData"] + ) + @pytest.mark.parametrize( + "given_user_fv", [True, False], ids=["WithUserfill", "NoUserfill"] + ) + def test_fillvalue_checking( + self, is_bytes, is_masked, contains_fv, given_user_fv + ): + dtype_code = "u1" if is_bytes else "f4" + dtype = np.dtype(dtype_code) + if given_user_fv: + user_fill = 123 if is_bytes else 1.234 + check_value = user_fill + else: + user_fill = None + check_value = default_fillvals[dtype_code] + + fill_info = _FillvalueCheckInfo( + user_value=user_fill, + check_value=check_value, + dtype=dtype, + varname="", + ) + + # Work out expected action, according to intended logic. + if is_bytes and is_masked and not given_user_fv: + msg_fragment = "'' contains byte data with masked points" + elif contains_fv: + msg_fragment = "'' contains unmasked data points equal to the fill-value" + else: + msg_fragment = None + + # Trial the action + result = _fillvalue_report( + fill_info, + is_masked=is_masked, + contains_fill_value=contains_fv, + warn=False, + ) + + # Check the result + if msg_fragment is None: + assert result is None + else: + assert isinstance(result, Warning) + assert msg_fragment in result.args[0] + + @pytest.mark.parametrize( + "has_collision", + [True, False], + ids=["WithFvCollision", "NoFvCollision"], + ) + def test_warn(self, has_collision): + fill_info = _FillvalueCheckInfo( + user_value=1.23, + check_value=1.23, + dtype=np.float32, + varname="", + ) + + # Check results + if has_collision: + # Check that we get the expected warning + expected_msg = "'' contains unmasked data points equal to the fill-value" + # Enter a warnings context that checks for the error. + warning_context = pytest.warns( + SaverFillValueWarning, match=expected_msg + ) + warning_context.__enter__() + else: + # Check that we get NO warning of the expected type. + warnings.filterwarnings("error", category=SaverFillValueWarning) + + # Do call: it should raise AND return a warning, ONLY IF there was a collision. + result = _fillvalue_report( + fill_info, + is_masked=True, + contains_fill_value=has_collision, + warn=True, + ) + + # Check result + if has_collision: + # Fail if no warning was raised .. + warning_context.__exit__(None, None, None) + # .. or result does not have the expected message content + assert expected_msg in result.args[0] + else: + # Fail if any warning result was produced. + assert result is None diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py similarity index 75% rename from lib/iris/tests/unit/fileformats/netcdf/test_save.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index 030edbfce2..68049b57fc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
-"""Unit tests for the `iris.fileformats.netcdf.save` function.""" - +"""Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip @@ -14,14 +13,19 @@ from tempfile import mkdtemp from unittest import mock -import netCDF4 as nc import numpy as np +import pytest import iris from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD -from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, save +from iris.fileformats.netcdf import ( + CF_CONVENTIONS_VERSION, + Saver, + _thread_safe_nc, + save, +) from iris.tests.stock import lat_lon_cube from iris.tests.stock.mesh import sample_mesh_cube @@ -38,7 +42,7 @@ def test_custom_conventions__ignored(self): # CF convention. with self.temp_filename(".nc") as nc_path: save(self.cube, nc_path, "NETCDF4") - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) res = ds.getncattr("Conventions") ds.close() self.assertEqual(res, CF_CONVENTIONS_VERSION) @@ -49,7 +53,7 @@ def test_custom_conventions__allowed(self): with mock.patch.object(self.options, "conventions_override", True): with self.temp_filename(".nc") as nc_path: save(self.cube, nc_path, "NETCDF4") - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) res = ds.getncattr("Conventions") ds.close() self.assertEqual(res, self.custom_conventions) @@ -61,7 +65,7 @@ def test_custom_conventions__allowed__missing(self): with mock.patch.object(self.options, "conventions_override", True): with self.temp_filename(".nc") as nc_path: save(self.cube, nc_path, "NETCDF4") - ds = nc.Dataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) res = ds.getncattr("Conventions") ds.close() self.assertEqual(res, CF_CONVENTIONS_VERSION) @@ -76,7 +80,7 @@ def test_attributes_arrays(self): with self.temp_filename("foo.nc") as nc_out: save([c1, c2], nc_out) - ds = nc.Dataset(nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) res = ds.getncattr("bar") ds.close() self.assertArrayEqual(res, np.arange(2)) @@ -92,7 +96,7 @@ def test_no_special_attribute_clash(self): with self.temp_filename("foo.nc") as nc_out: save([c1, c2], nc_out) - ds = nc.Dataset(nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) res = ds.variables["test"].getncattr("name") res_1 = ds.variables["test_1"].getncattr("name") ds.close() @@ -105,7 +109,7 @@ def test_no_unlimited_dims(self): cube = lat_lon_cube() with self.temp_filename("foo.nc") as nc_out: save(cube, nc_out) - ds = nc.Dataset(nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) self.assertFalse(ds.dimensions["latitude"].isunlimited()) def test_unlimited_dim_latitude(self): @@ -113,7 +117,7 @@ def test_unlimited_dim_latitude(self): unlim_dim_name = "latitude" with self.temp_filename("foo.nc") as nc_out: save(cube, nc_out, unlimited_dimensions=[unlim_dim_name]) - ds = nc.Dataset(nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) self.assertTrue(ds.dimensions[unlim_dim_name].isunlimited()) @@ -356,5 +360,104 @@ def test_connectivity_dim_varname_collision(self): self._check_save_and_reload([cube_1, cube_2]) +class Test_compute_usage: + """ + Test the operation of the save function 'compute' keyword. + + In actual use, this keyword controls 'delayed saving'. That is tested elsewhere, + in testing the 'Saver' class itself. + """ + + # A fixture to mock out Saver object creation in a 'save' call. 
+ @staticmethod + @pytest.fixture + def mock_saver_creation(): + # A mock for a Saver object. + mock_saver = mock.MagicMock(spec=Saver) + # make an __enter__ call return the object itself (as the real Saver does). + mock_saver.__enter__ = mock.Mock(return_value=mock_saver) + # A mock for the Saver() constructor call. + mock_new_saver_call = mock.Mock(return_value=mock_saver) + + # Replace the whole Saver class with a simple function, which thereby emulates + # the constructor call. This avoids complications due to the fact that Mock + # patching does not work in the usual way for __init__ and __new__ methods. + def mock_saver_class_create(*args, **kwargs): + return mock_new_saver_call(*args, **kwargs) + + # Patch the Saver() creation to return our mock Saver object. + with mock.patch( + "iris.fileformats.netcdf.saver.Saver", mock_saver_class_create + ): + # Return mocks for both constructor call, and Saver object. + yield mock_new_saver_call, mock_saver + + # A fixture to provide some mock args for 'Saver' creation. + @staticmethod + @pytest.fixture + def mock_saver_args(): + from collections import namedtuple + + # A special object for the cube, since cube.attributes must be indexable + mock_cube = mock.MagicMock() + args = namedtuple( + "saver_args", ["cube", "filename", "format", "compute"] + )( + cube=mock_cube, + filename=mock.sentinel.filepath, + format=mock.sentinel.netcdf4, + compute=mock.sentinel.compute, + ) + return args + + def test_saver_creation(self, mock_saver_creation, mock_saver_args): + # Check that 'save' creates a Saver, passing the 'compute' keyword. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=args.compute, + ) + # Check the Saver create call it made, in particular that the compute arg is + # passed in. + mock_saver_new.assert_called_once_with( + args.filename, args.format, compute=args.compute + ) + + def test_compute_true(self, mock_saver_creation, mock_saver_args): + # Check operation when compute=True. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + result = save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=True, + ) + # It should NOT have called 'delayed_completion' + assert mock_saver.delayed_completion.call_count == 0 + # Result should be None + assert result is None + + def test_compute_false_result_delayed( + self, mock_saver_creation, mock_saver_args + ): + # Check operation when compute=False. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + result = save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=False, + ) + # It should have called 'delayed_completion' .. + assert mock_saver.delayed_completion.call_count == 1 + # .. and should return the result of that. + assert result is mock_saver.delayed_completion.return_value + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py new file mode 100644 index 0000000000..4d627a706b --- /dev/null +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py @@ -0,0 +1,27 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
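The new test module that follows drives is_lazy_masked_data with real and lazy inputs; its expectations amount to: real arrays never qualify, and a dask array qualifies only when it wraps masked-array chunks. A small sketch of that distinction (the da.ma.masked_array construction is an extra illustration, not taken from the test):

import dask.array as da
import numpy as np

from iris._lazy_data import is_lazy_masked_data

real_masked = np.ma.masked_array([1, 2, 3], mask=[0, 1, 0])
lazy_plain = da.from_array(np.arange(3), chunks=-1)
lazy_masked = da.ma.masked_array(np.arange(3.0), mask=[False, True, False])

print(is_lazy_masked_data(real_masked))  # False - masked, but not lazy
print(is_lazy_masked_data(lazy_plain))   # False - lazy, but not masked
print(is_lazy_masked_data(lazy_masked))  # True  - lazy and masked
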
+"""Test function :func:`iris._lazy data.is_lazy_masked_data`.""" + +import dask.array as da +import numpy as np +import pytest + +from iris._lazy_data import is_lazy_masked_data + +real_arrays = [ + np.arange(3), + np.ma.array(range(3)), + np.ma.array(range(3), mask=[0, 1, 1]), +] +lazy_arrays = [da.from_array(arr) for arr in real_arrays] + + +@pytest.mark.parametrize( + "arr, expected", zip(real_arrays + lazy_arrays, [False] * 4 + [True] * 2) +) +def test_is_lazy_masked_data(arr, expected): + result = is_lazy_masked_data(arr) + assert result is expected diff --git a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py index c4416c587d..8e2d4f226b 100644 --- a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py +++ b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py @@ -37,6 +37,13 @@ def test_preserve_position(self): expected.get_position().bounds, result.get_position().bounds ) + def test_ax_on_subfigure(self): + subfig, _ = self.fig.subfigures(nrows=2) + subfig.subplots() + _replace_axes_with_cartopy_axes(ccrs.PlateCarree()) + result = plt.gca() + self.assertIs(result.get_figure(), subfig) + def tearDown(self): plt.close(self.fig) diff --git a/lib/iris/tests/unit/plot/test_hist.py b/lib/iris/tests/unit/plot/test_hist.py new file mode 100644 index 0000000000..8a74ff8701 --- /dev/null +++ b/lib/iris/tests/unit/plot/test_hist.py @@ -0,0 +1,51 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.plot.hist` function.""" +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + +from unittest import mock + +import numpy as np +import pytest + +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord +from iris.cube import Cube + +if tests.MPL_AVAILABLE: + import iris.plot as iplt + + +@tests.skip_plot +class Test: + @pytest.fixture(autouse=True) + def create_data(self): + self.data = np.array([0, 100, 110, 120, 200, 320]) + + @pytest.mark.parametrize( + "x", [AuxCoord, Cube, DimCoord, CellMeasure, AncillaryVariable] + ) + def test_simple(self, x): + with mock.patch("matplotlib.pyplot.hist") as mocker: + iplt.hist(x(self.data)) + # mocker.assert_called_once_with is not working as expected with + # _DimensionalMetadata objects so we use np.testing array equality + # checks instead. + args, kwargs = mocker.call_args + assert len(args) == 1 + np.testing.assert_array_equal(args[0], self.data) + + def test_kwargs(self): + cube = Cube(self.data) + bins = [0, 150, 250, 350] + with mock.patch("matplotlib.pyplot.hist") as mocker: + iplt.hist(cube, bins=bins) + mocker.assert_called_once_with(self.data, bins=bins) + + def test_unsupported_input(self): + with pytest.raises(TypeError, match="x must be a"): + iplt.hist(self.data) diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index edbef3934a..1ed2da1b13 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -72,7 +72,7 @@ def test_plot_longitude(self): class TestTrajectoryWrap(tests.IrisTest): """ Test that a line plot of geographic coordinates wraps around the end of the - coordinates rather than plotting accross the map. + coordinates rather than plotting across the map. 
""" diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py index 21fc8efa73..65fb115243 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py @@ -537,10 +537,11 @@ def test_section_cell_methods(self): expected = [ "name / (1) (-- : 1)", " Cell methods:", - " stdev area", - " mean y (10m, vertical), time (3min, =duration)", + " 0 area: stdev", + " 1 y: time: mean (interval: 10m" + " interval: 3min comment: vertical comment: =duration)", ] - self.assertEqual(rep, expected) + self.assertEqual(expected, rep) def test_unstructured_cube(self): # Check a sample mesh-cube against the expected result. diff --git a/lib/iris/tests/unit/representation/cube_printout/test_Table.py b/lib/iris/tests/unit/representation/cube_printout/test_Table.py index 2ff6738998..e5dba52c61 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_Table.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_Table.py @@ -60,7 +60,7 @@ def test_copy(self): def test_add_row(self): table = Table() self.assertEqual(table.n_columns, None) - # Add onw row. + # Add one row. table.add_row(["one", "two", "three"], aligns=["left", "left", "left"]) self.assertEqual(len(table.rows), 1) self.assertEqual(table.n_columns, 3) diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index bcf31a016f..d81f680df5 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -206,7 +206,7 @@ def test_cell_methods(self): rep = CubeSummary(cube) cell_method_section = rep.scalar_sections["Cell methods:"] - expected_contents = ["mean: x, y", "mean: x"] + expected_contents = ["0: x: y: mean", "1: x: mean"] self.assertEqual(cell_method_section.contents, expected_contents) def test_scalar_cube(self): diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py index 36f00fa53f..3df1634ba5 100644 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ b/lib/iris/tests/unit/util/test_broadcast_to_shape.py @@ -9,6 +9,10 @@ # importing anything else import iris.tests as tests # isort:skip +from unittest import mock + +import dask +import dask.array as da import numpy as np import numpy.ma as ma @@ -40,6 +44,17 @@ def test_added_dimensions_transpose(self): for j in range(4): self.assertArrayEqual(b[i, :, j, :].T, a) + @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) + def test_lazy_added_dimensions_transpose(self, mocked_compute): + # adding dimensions and having the dimensions of the input + # transposed + a = da.random.random([2, 3]) + b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1)) + mocked_compute.assert_not_called() + for i in range(5): + for j in range(4): + self.assertArrayEqual(b[i, :, j, :].T.compute(), a.compute()) + def test_masked(self): # masked arrays are also accepted a = np.random.random([2, 3]) @@ -49,6 +64,19 @@ def test_masked(self): for j in range(4): self.assertMaskedArrayEqual(b[i, :, j, :].T, m) + @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) + def test_lazy_masked(self, mocked_compute): + # masked arrays are also accepted + a = np.random.random([2, 3]) + m = da.ma.masked_array(a, mask=[[0, 1, 0], [0, 1, 1]]) + b = 
broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) + mocked_compute.assert_not_called() + for i in range(5): + for j in range(4): + self.assertMaskedArrayEqual( + b[i, :, j, :].compute().T, m.compute() + ) + def test_masked_degenerate(self): # masked arrays can have degenerate masks too a = np.random.random([2, 3]) diff --git a/lib/iris/tests/unit/util/test_find_discontiguities.py b/lib/iris/tests/unit/util/test_find_discontiguities.py index e939416e7d..9e043c71bd 100644 --- a/lib/iris/tests/unit/util/test_find_discontiguities.py +++ b/lib/iris/tests/unit/util/test_find_discontiguities.py @@ -29,26 +29,55 @@ def setUp(self): # Set up a 2d lat-lon cube with 2d coordinates that have been # transformed so they are not in a regular lat-lon grid. # Then generate a discontiguity at a single lat-lon point. - self.testcube_discontig = full2d_global() - make_bounds_discontiguous_at_point(self.testcube_discontig, 3, 3) - # Repeat that for a discontiguity in the grid 'Y' direction. - self.testcube_discontig_along_y = full2d_global() + # Discontiguities will be caused at the rightmost bounds. + self.testcube_discontig_right = full2d_global() + make_bounds_discontiguous_at_point(self.testcube_discontig_right, 3, 3) + + # Repeat for a discontiguity on the leftmost bounds. + self.testcube_discontig_left = full2d_global() + make_bounds_discontiguous_at_point( + self.testcube_discontig_left, 2, 4, upper=False + ) + # Repeat for a discontiguity on the topmost bounds. + self.testcube_discontig_top = full2d_global() make_bounds_discontiguous_at_point( - self.testcube_discontig_along_y, 2, 4, in_y=True + self.testcube_discontig_top, 2, 4, in_y=True ) - def test_find_discontiguities(self): + # Repeat for a discontiguity on the botommost bounds. + self.testcube_discontig_along_bottom = full2d_global() + make_bounds_discontiguous_at_point( + self.testcube_discontig_along_bottom, 2, 4, in_y=True, upper=False + ) + + def test_find_discontiguities_right(self): + # Check that the mask we generate when making the discontiguity + # matches that generated by find_discontiguities + cube = self.testcube_discontig_right + expected = cube.data.mask + returned = find_discontiguities(cube) + self.assertTrue(np.all(expected == returned)) + + def test_find_discontiguities_left(self): + # Check that the mask we generate when making the discontiguity + # matches that generated by find_discontiguities + cube = self.testcube_discontig_left + expected = cube.data.mask + returned = find_discontiguities(cube) + self.assertTrue(np.all(expected == returned)) + + def test_find_discontiguities_top(self): # Check that the mask we generate when making the discontiguity # matches that generated by find_discontiguities - cube = self.testcube_discontig + cube = self.testcube_discontig_top expected = cube.data.mask returned = find_discontiguities(cube) self.assertTrue(np.all(expected == returned)) - def test_find_discontiguities_in_y(self): + def test_find_discontiguities_bottom(self): # Check that the mask we generate when making the discontiguity # matches that generated by find_discontiguities - cube = self.testcube_discontig_along_y + cube = self.testcube_discontig_along_bottom expected = cube.data.mask returned = find_discontiguities(cube) self.assertTrue(np.all(expected == returned)) @@ -61,7 +90,7 @@ def test_find_discontiguities_1d_coord(self): find_discontiguities(cube) def test_find_discontiguities_with_atol(self): - cube = self.testcube_discontig + cube = self.testcube_discontig_right # Choose a very large absolute tolerance which will 
result in fine # discontiguities being disregarded atol = 100 @@ -72,7 +101,7 @@ def test_find_discontiguities_with_atol(self): self.assertTrue(np.all(expected == returned)) def test_find_discontiguities_with_rtol(self): - cube = self.testcube_discontig + cube = self.testcube_discontig_right # Choose a very large relative tolerance which will result in fine # discontiguities being disregarded rtol = 1000 diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index d81f2c40d7..a6374f97ad 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -116,8 +116,20 @@ def test_promote_scalar_auxcoord(self, stock_cube): ] self._assert_cube_notis(result, stock_cube) + def test_existing_dim_coord(self, stock_cube): + # Provide an existing dimensional coordinate + coord = iris.coords.DimCoord(1, long_name="dim") + stock_cube.add_aux_coord(coord) + + new_cube = iris.util.new_axis(stock_cube, coord) + with pytest.raises( + ValueError, match="is already a dimension coordinate." + ): + iris.util.new_axis(new_cube, coord) + def test_promote_non_scalar(self, stock_cube): # Provide a dimensional coordinate which is not scalar + iris.util.demote_dim_coord_to_aux_coord(stock_cube, "foo") with pytest.raises(ValueError, match="is not a scalar coordinate."): new_axis(stock_cube, "foo") diff --git a/lib/iris/util.py b/lib/iris/util.py index 9e0db9e66e..0b31ebdafc 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -23,7 +23,7 @@ import numpy.ma as ma from iris._deprecation import warn_deprecated -from iris._lazy_data import as_concrete_data, is_lazy_data +from iris._lazy_data import as_concrete_data, is_lazy_data, is_lazy_masked_data from iris.common import SERVICES from iris.common.lenient import _lenient_client import iris.exceptions @@ -34,8 +34,7 @@ def broadcast_to_shape(array, shape, dim_map): Broadcast an array to a given shape. Each dimension of the array must correspond to a dimension in the - given shape. Striding is used to repeat the array until it matches - the desired shape, returning repeated views on the original array. + given shape. The result is a read-only view (see :func:`numpy.broadcast_to`). If you need to write to the resulting array, make a copy first. Args: @@ -70,36 +69,36 @@ def broadcast_to_shape(array, shape, dim_map): # a is an array of shape (48, 96) result = broadcast_to_shape(a, (96, 48, 12), (1, 0)) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ - if len(dim_map) != array.ndim: - # We must check for this condition here because we cannot rely on - # getting an error from numpy if the dim_map argument is not the - # correct length, we might just get a segfault. - raise ValueError( - "dim_map must have an entry for every " - "dimension of the input array" - ) + n_orig_dims = len(array.shape) + n_new_dims = len(shape) - n_orig_dims + array = array.reshape(array.shape + (1,) * n_new_dims) + + # Get dims in required order. + array = np.moveaxis(array, range(n_orig_dims), dim_map) + new_array = np.broadcast_to(array, shape) - def _broadcast_helper(a): - strides = [0] * len(shape) - for idim, dim in enumerate(dim_map): - if shape[dim] != a.shape[idim]: - # We'll get garbage values if the dimensions of array are not - # those indicated by shape. 
- raise ValueError("shape and array are not compatible") - strides[dim] = a.strides[idim] - return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides) - - array_view = _broadcast_helper(array) - if ma.isMaskedArray(array): - if array.mask is ma.nomask: - # Degenerate masks can be applied as-is. - mask_view = array.mask + if ma.isMA(array): + # broadcast_to strips masks so we need to handle them explicitly. + mask = ma.getmask(array) + if mask is ma.nomask: + new_mask = ma.nomask else: - # Mask arrays need to be handled in the same way as the data array. - mask_view = _broadcast_helper(array.mask) - array_view = ma.array(array_view, mask=mask_view) - return array_view + new_mask = np.broadcast_to(mask, shape) + new_array = ma.array(new_array, mask=new_mask) + + elif is_lazy_masked_data(array): + # broadcast_to strips masks so we need to handle them explicitly. + mask = da.ma.getmaskarray(array) + new_mask = da.broadcast_to(mask, shape) + new_array = da.ma.masked_array(new_array, new_mask) + + return new_array def delta(ndarray, dimension, circular=False): @@ -142,6 +141,11 @@ def delta(ndarray, dimension, circular=False): >>> iris.util.delta(original, 0, circular=360) array([90, 90, 90, 90]) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ if circular is not False: _delta = np.roll(ndarray, -1, axis=dimension) @@ -192,6 +196,11 @@ def describe_diff(cube_a, cube_b, output_file=None): two cubes will merge requires additional logic that is beyond the scope of this function. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ if output_file is None: @@ -244,6 +253,11 @@ def guess_coord_axis(coord): Returns: 'T', 'Z', 'Y', 'X', or None. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ axis = None @@ -305,6 +319,11 @@ def rolling_window(a, window=1, step=1, axis=-1): array([[ 1., 2., 3.], [ 6., 7., 8.]]) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # NOTE: The implementation of this function originates from # https://github.com/numpy/numpy/pull/31#issuecomment-1304851 04/08/2011 @@ -359,6 +378,10 @@ def array_equal(array1, array2, withnans=False): This provides much the same functionality as :func:`numpy.array_equal`, but with additional support for arrays of strings and NaN-tolerant operation. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ def normalise_array(array): @@ -407,6 +430,11 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): if the actual error equals the maximum, whereas :func:`util.approx_equal` will return False. + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ wmsg = ( "iris.util.approx_equal has been deprecated and will be removed, " @@ -456,6 +484,11 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): for i in range(10): print(i, between_3_and_6(i)) + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. 
+ """ if lh_inclusive and rh_inclusive: return lambda c: lh <= c <= rh @@ -510,6 +543,11 @@ def reverse(cube_or_array, coords_or_dims): [19 18 17 16] [15 14 13 12]]] + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ from iris.cube import Cube @@ -588,6 +626,11 @@ def monotonic(array, strict=False, return_direction=False): ``(monotonic_status, direction)`` + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ if array.ndim != 1 or len(array) <= 1: raise ValueError( @@ -640,6 +683,11 @@ def column_slices_generator(full_slice, ndims): This method was developed as numpy does not support the direct approach of [(3, 5), : , (1, 6, 8)] for column based indexing. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ list_of_slices = [] @@ -1035,6 +1083,10 @@ def clip_string(the_str, clip_length=70, rider="..."): If the clip length was greater than the original string, the original string is returned unaltered. + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ if clip_length >= len(the_str) or clip_length <= 0: @@ -1065,37 +1117,26 @@ def format_array(arr): For customisations, use the :mod:`numpy.core.arrayprint` directly. + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ - summary_threshold = 85 - summary_insert = "..." if arr.size > summary_threshold else "" - edge_items = 3 - ffunc = str max_line_len = 50 - # Format the array with version 1.13 legacy behaviour - with np.printoptions(legacy="1.13"): - # Use this (private) routine for more control. - formatArray = np.core.arrayprint._formatArray - # N.B. the 'legacy' arg had different forms in different numpy versions - # -- fetch the required form from the internal options dict - format_options_legacy = np.core.arrayprint._format_options["legacy"] - - result = formatArray( - arr, - ffunc, - max_line_len, - next_line_prefix="\t\t", - separator=", ", - edge_items=edge_items, - summary_insert=summary_insert, - legacy=format_options_legacy, - ) + result = np.array2string( + arr, + max_line_len, + separator=", ", + threshold=85, + ) return result -def new_axis(src_cube, scalar_coord=None, expand_extras=()): +def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy """ Create a new axis as the leading dimension of the cube, promoting a scalar coordinate if specified. @@ -1128,6 +1169,11 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): >>> ncube = iris.util.new_axis(cube, 'time') >>> ncube.shape (1, 360, 360) + + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ def _reshape_data_array(data_manager): @@ -1172,8 +1218,14 @@ def _handle_dimensional_metadata( if scalar_coord is not None: scalar_coord = src_cube.coord(scalar_coord) + try: + src_cube.coord(scalar_coord, dim_coords=False) + except iris.exceptions.CoordinateNotFoundError: + emsg = scalar_coord.name() + " is already a dimension coordinate." + raise ValueError(emsg) + if not scalar_coord.shape == (1,): - emsg = scalar_coord.name() + "is not a scalar coordinate." 
+ emsg = scalar_coord.name() + " is not a scalar coordinate." raise ValueError(emsg) expand_extras = [ @@ -1239,6 +1291,11 @@ def squeeze(cube): >>> ncube.shape (360, 360) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ slices = [ @@ -1308,7 +1365,14 @@ def file_is_newer_than(result_path, source_paths): def is_regular(coord): - """Determine if the given coord is regular.""" + """ + Determine if the given coord is regular. + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ try: regular_step(coord) except iris.exceptions.CoordinateNotRegularError: @@ -1319,7 +1383,15 @@ def is_regular(coord): def regular_step(coord): - """Return the regular step from a coord or fail.""" + """ + Return the regular step from a coord or fail. + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError("Expected 1D coord") if coord.shape[0] < 2: @@ -1350,6 +1422,10 @@ def regular_points(zeroth, step, count): count : number The number of point values. + Notes + ------ + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ points = (zeroth + step) + step * np.arange(count, dtype=np.float32) _, regular = iris.util.points_step(points) @@ -1370,6 +1446,12 @@ def points_step(points): ------- numeric, bool A tuple containing the average difference between values, and whether the difference is regular. + + + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. """ # Calculations only make sense with multiple points points = np.asanyarray(points) @@ -1399,6 +1481,11 @@ def unify_time_units(cubes): * cubes: An iterable containing :class:`iris.cube.Cube` instances. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ epochs = {} @@ -1539,6 +1626,12 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): forecast_period x - - time x - - + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ from iris.coords import Coord, DimCoord @@ -1665,6 +1758,12 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): time x - - year x - - + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ from iris.coords import Coord @@ -1696,7 +1795,7 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): @functools.wraps(np.meshgrid) def _meshgrid(*xi, **kwargs): """ - @numpy v1.13, the dtype of each output nD coordinate is the same as its + @numpy v1.13, the dtype of each output n-D coordinate is the same as its associated input 1D coordinate. This is not the case prior to numpy v1.13, where the output dtype is cast up to its highest resolution, regardlessly. @@ -1715,14 +1814,15 @@ def _meshgrid(*xi, **kwargs): def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): """ - Searches coord for discontiguities in the bounds array, returned as a - boolean array (True where discontiguities are present). 
+ Searches the 'x' and 'y' coord on the cube for discontiguities in the + bounds array, returned as a boolean array (True for all cells which are + discontiguous with the cell immediately above them or to their right). Args: * cube (`iris.cube.Cube`): The cube to be checked for discontinuities in its 'x' and 'y' - coordinates. + coordinates. These coordinates must be 2D. Kwargs: @@ -1756,6 +1856,12 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): # Plot the masked cube slice: iplt.pcolormesh(masked_cube_slice) + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ lats_and_lons = [ "latitude", @@ -1841,7 +1947,7 @@ def _mask_array(array, points_to_mask, in_place=False): points_to_mask = al.ma.filled(points_to_mask, False) # Get broadcasted views of the arrays. Note that broadcast_arrays does not - # preserve masks, so we need to explicitly handle any exising mask on array. + # preserve masks, so we need to explicitly handle any existing mask on array. array_mask = al.ma.getmaskarray(array) array_data, array_mask, points_to_mask = al.broadcast_arrays( @@ -1865,9 +1971,9 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): """ Masks any cells in the cube's data array which correspond to cells marked ``True`` (or non zero) in ``points_to_mask``. ``points_to_mask`` may be - specified as a :class:`numpy.ndarray`, :class:`iris.coords.Coord` or - :class:`iris.cube.Cube`, following the same broadcasting approach as cube - arithmetic (see :ref:`cube maths`). + specified as a :class:`numpy.ndarray`, :class:`dask.array.Array`, + :class:`iris.coords.Coord` or :class:`iris.cube.Cube`, following the same + broadcasting approach as cube arithmetic (see :ref:`cube maths`). Parameters ---------- @@ -1875,7 +1981,7 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): cube : iris.cube.Cube Cube containing data that requires masking. - points_to_mask : numpy.ndarray, iris.coords.Coord or iris.cube.Cube + points_to_mask : numpy.ndarray, dask.array.Array, iris.coords.Coord or iris.cube.Cube Specifies booleans (or ones and zeros) indicating which points will be masked. in_place : bool, default=False @@ -1897,6 +2003,10 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): If either ``cube`` or ``points_to_mask`` is lazy, the result will be lazy. + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ if in_place and not cube.has_lazy_data(): # Ensure cube data is masked type so we can work on it in-place. @@ -1943,6 +2053,11 @@ def equalise_attributes(cubes): * removed (list): A list of dicts holding the removed attributes. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ removed = [] # Work out which attributes are identical across all the cubes. @@ -1986,6 +2101,12 @@ def is_masked(array): bool Whether or not the array has any masks. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ if is_lazy_data(array): result = da.ma.getmaskarray(array).any().compute() @@ -2002,7 +2123,6 @@ def _strip_metadata_from_dims(cube, dims): To be used by operations that modify or remove dimensions. Note: does nothing to (aux)-coordinates. Those would be handled explicitly by the calling operation. 
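(Aside, not part of the patch: the behaviour that the new ``Notes`` sections document can be checked directly. A minimal sketch, assuming a cube built straight from a dask array; ``has_lazy_data``, ``core_data``, ``iris.util.reverse`` and ``iris.util.mask_cube`` are the existing Iris APIs referenced in the hunks above.)

    import dask.array as da
    import iris.util
    from iris.cube import Cube

    # Build a cube whose data is still a (lazy) dask array.
    cube = Cube(da.arange(12, dtype=float).reshape(3, 4))
    print(cube.has_lazy_data())        # True

    # reverse() is documented above as maintaining laziness ...
    flipped = iris.util.reverse(cube, 0)
    print(flipped.has_lazy_data())     # still True

    # ... and mask_cube() accepts a lazy mask and keeps the result lazy.
    masked = iris.util.mask_cube(cube, cube.core_data() > 6)
    print(masked.has_lazy_data())      # still True

Printing ``True`` three times here simply illustrates the guarantee the added docstring notes make; it is not an output captured from running the patch.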
- """ reduced_cube = cube.copy() diff --git a/noxfile.py b/noxfile.py index 8aabf862fb..601a1d576e 100755 --- a/noxfile.py +++ b/noxfile.py @@ -5,13 +5,9 @@ """ -from datetime import datetime import hashlib import os from pathlib import Path -import re -from tempfile import NamedTemporaryFile -from typing import Literal import nox from nox.logger import logger @@ -20,7 +16,7 @@ nox.options.reuse_existing_virtualenvs = True #: Python versions we can run sessions under -_PY_VERSIONS_ALL = ["3.8"] +_PY_VERSIONS_ALL = ["3.9", "3.10", "3.11"] _PY_VERSION_LATEST = _PY_VERSIONS_ALL[-1] #: One specific python version for docs builds @@ -41,7 +37,7 @@ def session_lockfile(session: nox.sessions.Session) -> Path: """Return the path of the session lockfile.""" return Path( - f"requirements/ci/nox.lock/py{session.python.replace('.', '')}-linux-64.lock" + f"requirements/locks/py{session.python.replace('.', '')}-linux-64.lock" ) @@ -91,8 +87,8 @@ def cache_venv(session: nox.sessions.Session) -> None: cache = session_cachefile(session) with open(lockfile, "rb") as fi: hexdigest = hashlib.sha256(fi.read()).hexdigest() - with open(cache, "w") as fo: - fo.write(hexdigest) + with open(cache, "w") as fout: + fout.write(hexdigest) def cache_cartopy(session: nox.sessions.Session) -> None: @@ -176,6 +172,8 @@ def tests(session: nox.sessions.Session): """ Perform iris system, integration and unit tests. + Coverage testing is enabled if the "--coverage" or "-c" flag is used. + Parameters ---------- session: object @@ -185,13 +183,15 @@ def tests(session: nox.sessions.Session): prepare_venv(session) session.install("--no-deps", "--editable", ".") session.env.update(ENV) - session.run( - "python", - "-m", - "iris.tests.runner", - "--default-tests", - "--system-tests", - ) + run_args = [ + "pytest", + "-n", + "auto", + "lib/iris/tests", + ] + if "-c" in session.posargs or "--coverage" in session.posargs: + run_args[-1:-1] = ["--cov=lib/iris", "--cov-report=xml"] + session.run(*run_args) @nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") @@ -237,10 +237,10 @@ def gallery(session: nox.sessions.Session): session.install("--no-deps", "--editable", ".") session.env.update(ENV) session.run( - "python", - "-m", - "iris.tests.runner", - "--gallery-tests", + "pytest", + "-n", + "auto", + "docs/gallery_tests", ) @@ -299,236 +299,3 @@ def wheel(session: nox.sessions.Session): "import iris; print(f'{iris.__version__=}')", external=True, ) - - -@nox.session -@nox.parametrize( - "run_type", - ["overnight", "branch", "cperf", "sperf", "custom"], - ids=["overnight", "branch", "cperf", "sperf", "custom"], -) -def benchmarks( - session: nox.sessions.Session, - run_type: Literal["overnight", "branch", "cperf", "sperf", "custom"], -): - """ - Perform Iris performance benchmarks (using Airspeed Velocity). - - All run types require a single Nox positional argument (e.g. - ``nox --session="foo" -- my_pos_arg``) - detailed in the parameters - section - and can optionally accept a series of further arguments that will - be added to session's ASV command. - - Parameters - ---------- - session: object - A `nox.sessions.Session` object. - run_type: {"overnight", "branch", "cperf", "sperf", "custom"} - * ``overnight``: benchmarks all commits between the input **first - commit** to ``HEAD``, comparing each to its parent for performance - shifts. If a commit causes shifts, the output is saved to a file: - ``.asv/performance-shifts/``. Designed for checking the - previous 24 hours' commits, typically in a scheduled script. 
- * ``branch``: Performs the same operations as ``overnight``, but always - on two commits only - ``HEAD``, and ``HEAD``'s merge-base with the - input **base branch**. Output from this run is never saved to a file. - Designed for testing if the active branch's changes cause performance - shifts - anticipating what would be caught by ``overnight`` once - merged. - **For maximum accuracy, avoid using the machine that is running this - session. Run time could be >1 hour for the full benchmark suite.** - * ``cperf``: Run the on-demand CPerf suite of benchmarks (part of the - UK Met Office NG-VAT project) for the ``HEAD`` of ``upstream/main`` - only, and publish the results to the input **publish directory**, - within a unique subdirectory for this run. - * ``sperf``: As with CPerf, but for the SPerf suite. - * ``custom``: run ASV with the input **ASV sub-command**, without any - preset arguments - must all be supplied by the user. So just like - running ASV manually, with the convenience of re-using the session's - scripted setup steps. - - Examples - -------- - * ``nox --session="benchmarks(overnight)" -- a1b23d4`` - * ``nox --session="benchmarks(branch)" -- upstream/main`` - * ``nox --session="benchmarks(branch)" -- upstream/mesh-data-model`` - * ``nox --session="benchmarks(branch)" -- upstream/main --bench=regridding`` - * ``nox --session="benchmarks(cperf)" -- my_publish_dir - * ``nox --session="benchmarks(custom)" -- continuous a1b23d4 HEAD --quick`` - - """ - # The threshold beyond which shifts are 'notable'. See `asv compare`` docs - # for more. - COMPARE_FACTOR = 1.2 - - session.install("asv", "nox") - - data_gen_var = "DATA_GEN_PYTHON" - if data_gen_var in os.environ: - print("Using existing data generation environment.") - else: - print("Setting up the data generation environment...") - # Get Nox to build an environment for the `tests` session, but don't - # run the session. Will re-use a cached environment if appropriate. - session.run_always( - "nox", - "--session=tests", - "--install-only", - f"--python={_PY_VERSION_LATEST}", - ) - # Find the environment built above, set it to be the data generation - # environment. - data_gen_python = next( - Path(".nox").rglob(f"tests*/bin/python{_PY_VERSION_LATEST}") - ).resolve() - session.env[data_gen_var] = data_gen_python - - mule_dir = data_gen_python.parents[1] / "resources" / "mule" - if not mule_dir.is_dir(): - print("Installing Mule into data generation environment...") - session.run_always( - "git", - "clone", - "https://github.com/metomi/mule.git", - str(mule_dir), - external=True, - ) - session.run_always( - str(data_gen_python), - "-m", - "pip", - "install", - str(mule_dir / "mule"), - external=True, - ) - - print("Running ASV...") - session.cd("benchmarks") - # Skip over setup questions for a new machine. - session.run("asv", "machine", "--yes") - - # All run types require one Nox posarg. - run_type_arg = { - "overnight": "first commit", - "branch": "base branch", - "cperf": "publish directory", - "sperf": "publish directory", - "custom": "ASV sub-command", - } - if run_type not in run_type_arg.keys(): - message = f"Unsupported run-type: {run_type}" - raise NotImplementedError(message) - if not session.posargs: - message = ( - f"Missing mandatory first Nox session posarg: " - f"{run_type_arg[run_type]}" - ) - raise ValueError(message) - first_arg = session.posargs[0] - # Optional extra arguments to be passed down to ASV. 
- asv_args = session.posargs[1:] - - def asv_compare(*commits): - """Run through a list of commits comparing each one to the next.""" - commits = [commit[:8] for commit in commits] - shifts_dir = Path(".asv") / "performance-shifts" - for i in range(len(commits) - 1): - before = commits[i] - after = commits[i + 1] - asv_command_ = f"asv compare {before} {after} --factor={COMPARE_FACTOR} --split" - session.run(*asv_command_.split(" ")) - - if run_type == "overnight": - # Record performance shifts. - # Run the command again but limited to only showing performance - # shifts. - shifts = session.run( - *asv_command_.split(" "), "--only-changed", silent=True - ) - if shifts: - # Write the shifts report to a file. - # Dir is used by .github/workflows/benchmarks.yml, - # but not cached - intended to be discarded after run. - shifts_dir.mkdir(exist_ok=True, parents=True) - shifts_path = (shifts_dir / after).with_suffix(".txt") - with shifts_path.open("w") as shifts_file: - shifts_file.write(shifts) - - # Common ASV arguments for all run_types except `custom`. - asv_harness = ( - "asv run {posargs} --attribute rounds=4 --interleave-rounds --strict " - "--show-stderr" - ) - - if run_type == "overnight": - first_commit = first_arg - commit_range = f"{first_commit}^^.." - asv_command = asv_harness.format(posargs=commit_range) - session.run(*asv_command.split(" "), *asv_args) - - # git rev-list --first-parent is the command ASV uses. - git_command = f"git rev-list --first-parent {commit_range}" - commit_string = session.run( - *git_command.split(" "), silent=True, external=True - ) - commit_list = commit_string.rstrip().split("\n") - asv_compare(*reversed(commit_list)) - - elif run_type == "branch": - base_branch = first_arg - git_command = f"git merge-base HEAD {base_branch}" - merge_base = session.run( - *git_command.split(" "), silent=True, external=True - )[:8] - - with NamedTemporaryFile("w") as hashfile: - hashfile.writelines([merge_base, "\n", "HEAD"]) - hashfile.flush() - commit_range = f"HASHFILE:{hashfile.name}" - asv_command = asv_harness.format(posargs=commit_range) - session.run(*asv_command.split(" "), *asv_args) - - asv_compare(merge_base, "HEAD") - - elif run_type in ("cperf", "sperf"): - publish_dir = Path(first_arg) - if not publish_dir.is_dir(): - message = ( - f"Input 'publish directory' is not a directory: {publish_dir}" - ) - raise NotADirectoryError(message) - publish_subdir = ( - publish_dir - / f"{run_type}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" - ) - publish_subdir.mkdir() - - # Activate on demand benchmarks (C/SPerf are deactivated for 'standard' runs). - session.env["ON_DEMAND_BENCHMARKS"] = "True" - commit_range = "upstream/main^!" - - asv_command = ( - asv_harness.format(posargs=commit_range) + f" --bench={run_type}" - ) - # C/SPerf benchmarks are much bigger than the CI ones: - # Don't fail the whole run if memory blows on 1 benchmark. - asv_command = asv_command.replace(" --strict", "") - # Only do a single round. - asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command) - session.run(*asv_command.split(" "), *asv_args) - - asv_command = f"asv publish {commit_range} --html-dir={publish_subdir}" - session.run(*asv_command.split(" ")) - - # Print completion message. - location = Path().cwd() / ".asv" - print( - f'New ASV results for "{run_type}".\n' - f'See "{publish_subdir}",' - f'\n or JSON files under "{location / "results"}".' 
- ) - - else: - asv_subcommand = first_arg - assert run_type == "custom" - session.run("asv", asv_subcommand, *asv_args) diff --git a/pyproject.toml b/pyproject.toml index bdb8a431e5..4f9ade1351 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,68 @@ requires = [ # Defined by PEP 517 build-backend = "setuptools.build_meta" +[project] +authors = [ + {name = "Iris Contributors", email = "scitools.pub@gmail.com"} +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", + "Operating System :: MacOS", + "Operating System :: POSIX", + "Operating System :: POSIX :: Linux", + "Operating System :: Unix", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Topic :: Scientific/Engineering :: Visualization", +] +dynamic = [ + "dependencies", + "readme", + "version", +] +description = "A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data" +keywords = [ + "cf-metadata", + "data-analysis", + "earth-science", + "grib", + "netcdf", + "meteorology", + "oceanography", + "space-weather", + "ugrid", + "visualisation", +] +license = {text = "LGPL-3.0-or-later"} +name = "scitools-iris" +requires-python = ">=3.9" + +[project.urls] +Code = "https://github.com/SciTools/iris" +Discussions = "https://github.com/SciTools/iris/discussions" +Documentation = "https://scitools-iris.readthedocs.io/en/stable/" +Issues = "https://github.com/SciTools/iris/issues" + +[tool.setuptools] +license-files = ["COPYING", "COPYING.LESSER"] +zip-safe = false + +[tool.setuptools.dynamic] +dependencies = {file = "requirements/pypi-core.txt"} +readme = {file = "README.md", content-type = "text/markdown"} + +[tool.setuptools.packages.find] +include = ["iris*"] +where = ["lib"] + [tool.setuptools_scm] write_to = "lib/iris/_version.py" local_scheme = "dirty-tag" @@ -15,7 +77,7 @@ version_scheme = "release-branch-semver" [tool.black] line-length = 79 -target-version = ['py38'] +target-version = ['py39'] include = '\.pyi?$' extend-exclude = ''' ( @@ -46,3 +108,29 @@ verbose = "False" [tool.pytest.ini_options] addopts = "-ra" testpaths = "lib/iris" + +[tool.coverage.run] +branch = true +source = [ + "lib/iris", +] +omit = [ + "lib/iris/tests/*", + "lib/iris/etc/*", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "if __name__ == .__main__.:" +] + +[tool.codespell] +ignore-words-list = "alpha-numeric,degreee,discontiguities,lazyness,meaned,nin" +skip = "_build,*.css,*.ipynb,*.js,*.html,*.svg,*.xml,.git,generated" + +[tool.check-manifest] +ignore = [ + "lib/iris/_version.py", + "lib/iris/std_names.py", +] diff --git a/requirements/README.md b/requirements/README.md new file mode 100644 index 0000000000..9d9368b9c2 --- /dev/null +++ b/requirements/README.md @@ -0,0 +1,8 @@ +# ⚠️ + +This directory contains: + +- The `locks` directory which contains auto-generated `conda-lock` environment files for each `python` distribution and `platform` supported by `iris`. +- The **top-level** `conda` environment `*.yml` files for each `python` distribution supported by `iris`. 
+- The `pip` core package dependencies (`pypi-core.txt`) for the [scitools-iris](https://pypi.org/project/scitools-iris/) package on PyPI. Please reference the `pyproject.toml` in the repository root directory for further details. + diff --git a/requirements/ci/iris.yml b/requirements/ci/iris.yml deleted file mode 120000 index 1e473d36d5..0000000000 --- a/requirements/ci/iris.yml +++ /dev/null @@ -1 +0,0 @@ -py310.yml \ No newline at end of file diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock deleted file mode 100644 index 77cd6b8962..0000000000 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ /dev/null @@ -1,269 +0,0 @@ -# Generated by conda-lock. -# platform: linux-64 -# input_hash: 0543fd9bbb31e9f896ccf547f3b155d68bb748634268c28dde6ff3ac77aa74d3 -@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.39-hcc3a1bd_1.conda#737be0d34c22d24432049ab7a3214de4 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-3_cp38.conda#2f3f7af062b42d664117662612022204 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d -https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f -https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad -https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567 
-https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55 -https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 -https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d -https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f -https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268 -https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-h9c3ff4c_0.tar.bz2#c77f5e4e418fa47d699d6afa54c5d444 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 -https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.14-h166bdaf_0.tar.bz2#fc84a0446e4e4fb882e78d786cfb9734 -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.4-h166bdaf_0.tar.bz2#b4f717df2d377410b462328bf0e8fb7d -https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 -https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5 -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 
-https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.1-h27087fc_0.tar.bz2#0af513b75f78a701a152568a31303bdf -https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.7-h0b41bf4_1.conda#7adaac6ff98219bcb99b45e408b80f4e -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 -https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 -https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 -https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 -https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.21-h583fa2b_2.conda#7b36a10b58964d4444fcba44244710c5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657 -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd -https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 
-https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.31-h26416b9_0.tar.bz2#6c531bc30d49ae75b9c7c7f65bd62e3c -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad -https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.6-h63197d8_0.conda#201168ef66095bbd565e124ee2c56a20 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.1.0-hcb278e6_1.conda#d7a07b1f5974bce4735112aaef0c1467 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h82bc61c_0.conda#a01611c54334d783847879ee40109657 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.31-hbc51c84_0.tar.bz2#da9633eee814d4e910fe42643a356315 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.82-he02c5a1_0.conda#f8d7f11d19e4cb2207eab159fd4c0152 -https://conda.anaconda.org/conda-forge/linux-64/python-3.8.15-h4a9ceb5_0_cpython.conda#dc29a8a79d0f2c80004cc06d3190104f -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 
-https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9 -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e -https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2#a6cf47b09786423200d7982d1faa19eb -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py38h578d9bd_3.tar.bz2#34e1f12e3ed15aff218644e9d865b722 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a -https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.1-hc2a2eb6_0.tar.bz2#78415f0180a8d9c5bcc47889e00d5fb1 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.11.0-pyhd8ed1ab_0.tar.bz2#eb919f2119a6db5d0192f9e9c3711572 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815 -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 -https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905 
-https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py38h43d8883_1.tar.bz2#41ca56d5cac7bfc7eb4fcdbee878eb84 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.6-default_h3a83d3e_0.conda#535dd0ca1dcb165b6a8ffa10d01945fe -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.87.0-hdc1c0ab_0.conda#bc302fa1cf8eda15c60f669b7524a320 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.1-hb675445_2.conda#509f08b3789d9e7e9a72871491ae08e2 -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 -https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py38h0a891b7_2.tar.bz2#c342a370480791db83d5dd20f2d8899f -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py38h97ac3a3_0.tar.bz2#0c469687a517052c0d581fc6e1a4189d -https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.1-py38hab0fcb9_0.conda#2c0b3c72dad0288d9582ccbceb250cb4 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea -https://conda.anaconda.org/conda-forge/noarch/packaging-22.0-pyhd8ed1ab_0.conda#0e8e1bd93998978fc3125522266d12db -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 -https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py38h0a891b7_0.tar.bz2#fe2ef279417faa1af0adf178de2032f7 -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 -https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py38h1de0b5d_0.conda#7db73572d4f7e10a759bad609a228ad0 -https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7-pyhd8ed1ab_0.conda#c8d7e34ca76d6ecc03b84bedfd99d689 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_5.tar.bz2#0856c59f9ddb710c640dc0428d66b1b7 -https://conda.anaconda.org/conda-forge/noarch/setuptools-65.6.3-pyhd8ed1ab_0.conda#9600fc9524d3f821e6a6d58c52f5bf5a -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 -https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py38h0a891b7_1.tar.bz2#358beb228a53b5e1031862de3525d1d3 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py38h0a891b7_0.tar.bz2#44421904760e9f5ae2035193e04360f0 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/zipp-3.11.0-pyhd8ed1ab_0.conda#09b5b885341697137879a4f039a9e5a1 -https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_3.conda#3ac112151c6b6cfe457e976de41af0c5 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py38h26c90d9_1.tar.bz2#dcc025a7bb54374979c500c2e161fac9 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.6-py38h43d8883_0.tar.bz2#1107ee053d55172b26c4fc905dd0238e -https://conda.anaconda.org/conda-forge/linux-64/curl-7.87.0-hdc1c0ab_0.conda#b14123ca479b9473d7f7395b0fd25c97 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py38h0a891b7_1.tar.bz2#62c89ddefed9c5835e228a32b357a28d -https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 
-https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.6-default_h2e3cab8_0.conda#1b2cee49acc5b03c73ad0f68bfe04bb8 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572 -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h26c90d9_1008.tar.bz2#6bc8cd29312f4fc77156b78124e165cd -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c -https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py38hb32c036_0.conda#a288a6e69efc2f20c30ebfa590e11bed -https://conda.anaconda.org/conda-forge/noarch/pip-22.3.1-pyhd8ed1ab_0.tar.bz2#da66f2851b9836d3a7c5190082a45f7d -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.0-h8ffa02c_1.conda#ed901e1f5c504b144b31f015c6702634 -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-h126f2b6_0.tar.bz2#e4b74b33e13dd146e7d8b5078fc9ad30 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.0-pyhd8ed1ab_2.tar.bz2#ac82c7aebc282e6ac0450fca012ca78c -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h26c90d9_3.tar.bz2#6e7902b0e96f42fa1b73daa5f65dd669 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py38h26c90d9_2.tar.bz2#d30399a3c636c75cfd3460c92effa960 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.0-py38hd07e089_0.conda#e8243980979661d5e941fcfd954ddb13 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.5-py38hfa26641_0.conda#7be81814bae276dc7b4c707cf1e8186b -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h0a891b7_1005.tar.bz2#e99e08812dfff30fdd17b3f8838e2759 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py38h26c90d9_2.tar.bz2#0ea017e84efe45badce6c32f274dbf8e -https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.0-py38h3d167d9_0.conda#0ef859aa9dafce54bdf3d56715daed35 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.12.1-pyhd8ed1ab_0.conda#f12878f9839c72f3d51af02fb10da43d -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.21.3-h25f0c4b_1.conda#0c8a8f15aa319c91d9010072278feddd -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.6.2-py38hb021067_0.tar.bz2#72422499195d8aded0dfd461c6e3e86f -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.2-py38h8f669ce_0.conda#dbc17622f9d159be987bd21959d5494e -https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.6.2-pyhd8ed1ab_0.conda#0b4cc3f8181b0d8446eb5387d7848a54 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py38hf928c62_0.conda#bb6d6874f1dcafdd2dce7dfd54d2b96c -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py38hfa26641_2.tar.bz2#ad6437509a14f1e8e5b8a354f93f340c 
-https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.1.0-pyhd8ed1ab_0.conda#e82f8fb903d7c4a59c77954759c341f9 -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h43d8883_3.tar.bz2#82b3797d08a43a101b645becbb938e65 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.21.3-h4243ec0_1.conda#905563d166c13ba299e39d6c9fcebd1c -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.12-pyhd8ed1ab_0.conda#a34dcea79b2bed9520682a07f80d1c0f -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_1.tar.bz2#0b69750bb937cab0db14f6bcef6fd787 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py38h6b4b75c_103.conda#ea3d2204fc3a7db7d831daa437a58717 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046 -https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.17.1-py38h578d9bd_0.conda#4ddc66bb73c2d53d194875c2ee8f0f06 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_102.conda#cbae8c932a9d2ee620db7ce7ae0abaf5 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.21.0-py38h578d9bd_0.conda#4fb68a31e2377d41d5a33e47a5436d75 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.6-hf6cd601_5.conda#9c23a5205b67f2a67b19c84bf1fd7f5e -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.13-pyhd8ed1ab_0.conda#3078ef2359efd6ecadbc7e085c5e0592 -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py38h4407c66_101.conda#1deba9421c01396e0b1381a02a29ed93 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.0.5-h2e5815a_0.conda#96bf06b24d74a5bf826485e9032c9312 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38h7492b6b_2.tar.bz2#cfa725eff634872f90dcd5ebf8e8dc1a -https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2#089382ee0e2dc2eae33a04cc3c2bddb0 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.6.2-py38h578d9bd_0.tar.bz2#e1a19f0d4686a701d4a4acce2b625acb -https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429 -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py38h10c12cc_0.conda#466ea530d622838f6cdec4f771ddc249 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c 
-https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py38h3d2c718_0.conda#55ba6e3a49c4293302262286a49607d8 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a diff --git a/requirements/iris.yml b/requirements/iris.yml new file mode 120000 index 0000000000..b0c50b8bfd --- /dev/null +++ b/requirements/iris.yml @@ -0,0 +1 @@ +py311.yml \ No newline at end of file diff --git a/requirements/locks/README.md b/requirements/locks/README.md new file mode 100644 index 0000000000..21d72631b4 --- /dev/null +++ b/requirements/locks/README.md @@ -0,0 +1,5 @@ +# ⚠️ + +This directory contains auto-generated `conda-lock` environment files for each `python` distribution supported by `iris`. + +Please **do not** manually edit these files as they will be overwritten by the `refresh-lockfiles` GHA CI workflow. diff --git a/requirements/ci/nox.lock/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock similarity index 50% rename from requirements/ci/nox.lock/py310-linux-64.lock rename to requirements/locks/py310-linux-64.lock index 75ec1e5579..6133f53225 100644 --- a/requirements/ci/nox.lock/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,19 +1,18 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 234b47d943728b5abe70fba0fd74c6adc10e4f1e2a14b919344f8a693b5b3e6f +# input_hash: 081cae6b15083563c7942b761f2295b86794b799d4c679a81f95c695c576e491 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.39-hcc3a1bd_1.conda#737be0d34c22d24432049ab7a3214de4 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 -https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda#51fc4fcfb19f5d95ffc8c339db5068e8 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 @@ -23,248 +22,251 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad -https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 -https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f -https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268 +https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-h9c3ff4c_0.tar.bz2#c77f5e4e418fa47d699d6afa54c5d444 +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 -https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.14-h166bdaf_0.tar.bz2#fc84a0446e4e4fb882e78d786cfb9734 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.4-h166bdaf_0.tar.bz2#b4f717df2d377410b462328bf0e8fb7d +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 -https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5 -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.1-h27087fc_0.tar.bz2#0af513b75f78a701a152568a31303bdf -https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.7-h0b41bf4_1.conda#7adaac6ff98219bcb99b45e408b80f4e +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-hd590300_3.conda#8f24d371ed9efb3f0b0de383fb81d51c https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 
+https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.21-h583fa2b_2.conda#7b36a10b58964d4444fcba44244710c5 +https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-h3358134_0.conda#c164eb2e0df905571d68f40ae957522d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 
-https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.31-h26416b9_0.tar.bz2#6c531bc30d49ae75b9c7c7f65bd62e3c +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.conda#cf4a8f520fdad3a63bb2bce74576cd2d https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad 
-https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.6-h63197d8_0.conda#201168ef66095bbd565e124ee2c56a20 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.1.0-hcb278e6_1.conda#d7a07b1f5974bce4735112aaef0c1467 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h82bc61c_0.conda#a01611c54334d783847879ee40109657 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.31-hbc51c84_0.tar.bz2#da9633eee814d4e910fe42643a356315 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.82-he02c5a1_0.conda#f8d7f11d19e4cb2207eab159fd4c0152 -https://conda.anaconda.org/conda-forge/linux-64/python-3.10.8-h4a9ceb5_0_cpython.conda#be2a6d78752c2ab85f360ce37d2c64e2 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.4-h5cf9203_0.conda#7be3251c7b337e46bea0b8f3a3ed3c58 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_2.conda#20b4708cd04bdc8138d03314ddd97885 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.11-he550d4f_0_cpython.conda#7439c9d24378a82b73a7a53868dacdf1 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h8ee46fc_1.conda#52d09ea80a42c0466214609ef0a2d62d +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 
-https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6 +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2#a6cf47b09786423200d7982d1faa19eb +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py310hc6cd4ac_0.conda#115ffd79412d084f541f485b92c94fcf https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py310hff52083_3.tar.bz2#785160da087cf1d70e989afbb761f01c -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.1-hc2a2eb6_0.tar.bz2#78415f0180a8d9c5bcc47889e00d5fb1 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.11.0-pyhd8ed1ab_0.tar.bz2#eb919f2119a6db5d0192f9e9c3711572 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.5.0-pyh1a96a4e_0.conda#20edd290b319aa0eff3e9055375756dc 
+https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 -https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.6-default_h3a83d3e_0.conda#535dd0ca1dcb165b6a8ffa10d01945fe +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.87.0-hdc1c0ab_0.conda#bc302fa1cf8eda15c60f669b7524a320 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.1-hb675445_2.conda#509f08b3789d9e7e9a72871491ae08e2 -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.1-h409715c_0.conda#4b82f5c9fc26b31d0f9302773a657507 +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py310h5764c6d_2.tar.bz2#2d7028ea2a77f909931e1a173d952261 -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py310h37cc914_0.tar.bz2#98d598d9178d7f3091212c61c0be693c +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py310h1fa729e_0.conda#a1f0db6709778b77b5903541eeac4032 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.1-py310h08bbf29_0.conda#0d1f2e988c8810be90ffe441a303090a +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605 
https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea -https://conda.anaconda.org/conda-forge/noarch/packaging-22.0-pyhd8ed1ab_0.conda#0e8e1bd93998978fc3125522266d12db +https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py310h5764c6d_0.tar.bz2#c3c55664e9becc48e6a652e2b641961f +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b -https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7-pyhd8ed1ab_0.conda#c8d7e34ca76d6ecc03b84bedfd99d689 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 -https://conda.anaconda.org/conda-forge/noarch/setuptools-65.6.3-pyhd8ed1ab_0.conda#9600fc9524d3f821e6a6d58c52f5bf5a +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py310h5764c6d_1.tar.bz2#be4a201ac582c11d89ed7d15b3157cc3 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py310h2372a71_0.conda#1c510e74c87dc9b8fe1f7f9e8dbcef96 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.2-pyha770c72_0.conda#5a4a270e5a3f93846d6bade2f71fa440 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/zipp-3.11.0-pyhd8ed1ab_0.conda#09b5b885341697137879a4f039a9e5a1 -https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0 +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 +https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.6-py310hbf28c38_0.tar.bz2#c5b1699e390d30b680dd93a2b251062b 
-https://conda.anaconda.org/conda-forge/linux-64/curl-7.87.0-hdc1c0ab_0.conda#b14123ca479b9473d7f7395b0fd25c97 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py310h5764c6d_1.tar.bz2#12ebe92a8a578bc903bd844744f4d040 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.6-py310h2372a71_0.conda#93b5564452a94d4bc633ab692ef29598 +https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.1-h409715c_0.conda#effaa9ea047f960bc70225be8337fd91 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.tar.bz2#fd18cd597d23b2b5ddde23bd5b7aec32 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py310h2372a71_0.conda#76426eaff204520e719207700359a855 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.6-default_h2e3cab8_0.conda#1b2cee49acc5b03c73ad0f68bfe04bb8 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572 +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.4-default_h1cdf331_0.conda#5bb4fde7a7ea23ea471b171561943aec +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c -https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py310h4927cde_0.conda#66366aceea767f174f4d0408f3a62812 -https://conda.anaconda.org/conda-forge/noarch/pip-22.3.1-pyhd8ed1ab_0.tar.bz2#da66f2851b9836d3a7c5190082a45f7d -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.0-h8ffa02c_1.conda#ed901e1f5c504b144b31f015c6702634 -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-h126f2b6_0.tar.bz2#e4b74b33e13dd146e7d8b5078fc9ad30 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.0-pyhd8ed1ab_2.tar.bz2#ac82c7aebc282e6ac0450fca012ca78c +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 
+https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py310h582fbeb_1.conda#cf62f6cff3536eafaaa0c740b0bf7465 +https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py310hde88566_3.tar.bz2#0b686f306a76fba9a61e7019f854321f -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py310hde88566_2.tar.bz2#61e2f2f7befaf45f47d1da449a9a0aca -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.0-py310h8b84c32_0.conda#823009371d9b961c83cdb9aa80e1e6e7 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.5-py310hd8f1fbe_0.conda#765b39936044b542a69ec2d863f5b891 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py310h5764c6d_1005.tar.bz2#87669c3468dff637bbd0363bc0f895cf -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py310hde88566_2.tar.bz2#7433944046deda7775c5b1f7e0b6fe18 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.0-py310h34c0648_0.conda#af4b0c22dc4006ce3c095e840cb2efd7 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.12.1-pyhd8ed1ab_0.conda#f12878f9839c72f3d51af02fb10da43d -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.21.3-h25f0c4b_1.conda#0c8a8f15aa319c91d9010072278feddd -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.6.2-py310h8d5ebf3_0.tar.bz2#da51ddb20c0f99d672eb756c3abf27e7 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.2-py310h769672d_0.conda#bc363997d22f3b058fb17f1e89d4c96f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.6.2-pyhd8ed1ab_0.conda#0b4cc3f8181b0d8446eb5387d7848a54 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py310hfc24d34_0.conda#c126f81b5cea6b2d4a64d0744249a26f -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310hd8f1fbe_2.tar.bz2#0d815f1b2258d3d4c17cc80fd01e0f36 -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.1.0-pyhd8ed1ab_0.conda#e82f8fb903d7c4a59c77954759c341f9 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h056c13c_1.conda#32d925cfd330e0cbb72b7618558a44e8 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py310hc6cd4ac_0.conda#a3217e1bff09702dfdfcb536825fc12d +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.2-hd8ed1ab_0.conda#f676553904bb8f7c1dfe71c9db0d9ba7 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py310he60537e_0.conda#68b2dd34c69d08b05a9db5e3596fe3ee +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.1-py310h7cbd5c2_1.conda#25fc16ee9a1df69e91c8213530f2cc8c +https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py310hb814896_1.conda#d44c6841ee904252e0e8b7a1c7b11383 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 +https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.21.3-h4243ec0_1.conda#905563d166c13ba299e39d6c9fcebd1c -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.12-pyhd8ed1ab_0.conda#a34dcea79b2bed9520682a07f80d1c0f +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.1-pyhd8ed1ab_0.conda#b90a2dec6d308d71649dbe58dc32c337 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_1.tar.bz2#0b69750bb937cab0db14f6bcef6fd787 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py310h0a86a1f_103.conda#7f69695b684f2595d9ba1ce26d693b7d -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046 -https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.17.1-py310hff52083_0.conda#d26ee3f6561669ec1f118d6d3404e42a -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_102.conda#cbae8c932a9d2ee620db7ce7ae0abaf5 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.3-nompi_py310h2d0b64f_102.conda#7e4ea99dccc0dd27fd1c87821ba8ef13 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb +https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b +https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.21.0-py310hff52083_0.conda#41b6a707f04268b028c497d346a97693 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.6-hf6cd601_5.conda#9c23a5205b67f2a67b19c84bf1fd7f5e -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.13-pyhd8ed1ab_0.conda#3078ef2359efd6ecadbc7e085c5e0592 -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py310h515c5ea_101.conda#8a00edb7362ef5ff0db5dd75099daac7 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.0.5-h2e5815a_0.conda#96bf06b24d74a5bf826485e9032c9312 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310h29803b5_2.tar.bz2#1e2c49215b17e6cf06edf100c9869ebe -https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2#089382ee0e2dc2eae33a04cc3c2bddb0 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.6.2-py310hff52083_0.tar.bz2#aa78d12708912cd34135e6694a046ba0 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429 -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py310h8deb116_0.conda#ef72eeddf5316330730b11907c6c07d8 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310hcb7e713_0.conda#bd14eaad9bbf54b78e48ecb8b644fcf6 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h98fae49_1.conda#1cad58e8dceb1af51dbd963bb7f53f34 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b 
+https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py310ha4c1d20_3.conda#0414d57832172f3cdcf56b5f053e177d +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_1.conda#c6b2e7903121c3210462a0866a561993 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310h7eb24ba_1.conda#e727db22a14344608c2caeccaa9e9d2b +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310hab646b1_3.conda#d049da3204bf5ecb54a852b622f2d7d2 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py310hff52083_0.conda#c2b60c44d38d32779006a15c2581f0d1 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock new file mode 100644 index 0000000000..7ab3e170f6 --- /dev/null +++ b/requirements/locks/py311-linux-64.lock @@ -0,0 +1,271 @@ +# Generated by conda-lock. 
+# platform: linux-64 +# input_hash: 735e824e95f2b2e689fb7433e592a1511d9a7959fe4b524373621b99ae41ee87 +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 
+https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-hd590300_3.conda#8f24d371ed9efb3f0b0de383fb81d51c +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-h3358134_0.conda#c164eb2e0df905571d68f40ae957522d +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.conda#cf4a8f520fdad3a63bb2bce74576cd2d +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.4-h5cf9203_0.conda#7be3251c7b337e46bea0b8f3a3ed3c58 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_2.conda#20b4708cd04bdc8138d03314ddd97885 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.3-h2755cc3_0_cpython.conda#37005ea5f68df6a8a381b70cf4d4a160 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h8ee46fc_1.conda#52d09ea80a42c0466214609ef0a2d62d +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc 
+https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py311hb755f60_0.conda#17f4738a1ca6155a63d2a0cbd3e4a8b1 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 +https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.5.0-pyh1a96a4e_0.conda#20edd290b319aa0eff3e9055375756dc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.1-h409715c_0.conda#4b82f5c9fc26b31d0f9302773a657507 +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 
+https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py311h2582759_0.conda#adb20bd57069614552adac60a020c36d +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py311h8e6699e_0.conda#90db8cc0dfa20853329bfc6642f887aa +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 
+https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.conda#12b1c374ee90a1aa11ea921858394dc8 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.2-pyha770c72_0.conda#5a4a270e5a3f93846d6bade2f71fa440 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 +https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py311ha3edf6b_0.conda#e7548e7f58965a2fe97a95950a5fedc6 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.6-py311h459d7ec_0.conda#959422baa360b4aaf505aedff7d77943 +https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.1-h409715c_0.conda#effaa9ea047f960bc70225be8337fd91 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.tar.bz2#21523141b35484b1edafba962c6ea883 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py311h459d7ec_0.conda#ddd2cd004e10bc7a1e042283326cbf91 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.4-default_h1cdf331_0.conda#5bb4fde7a7ea23ea471b171561943aec +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 
+https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py311h0b84326_1.conda#6be2190fdbf26a6c1d3356a54d955237 +https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py311hb755f60_0.conda#2b5430f2f1651f460c852e1fdd549184 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.2-hd8ed1ab_0.conda#f676553904bb8f7c1dfe71c9db0d9ba7 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py311h8597a09_0.conda#70c3b734ffe82c16b6d121aaa11929a8 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.1-py311h320fe9a_1.conda#37f841a3140999c4735f7d8091072bea +https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py311h1850bce_1.conda#572159a946b809df471b11db4995c708 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py311hcafe171_3.conda#0d79df2a96f6572fed2883374400b235 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 +https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.1-pyhd8ed1ab_0.conda#b90a2dec6d308d71649dbe58dc32c337 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a 
+https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.3-nompi_py311h1717473_102.conda#d3b4d3ed2f3188d27d43e2c95d0dc2ab +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb +https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b +https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h98fae49_1.conda#1cad58e8dceb1af51dbd963bb7f53f34 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py311h64a7726_3.conda#a01a3a7428e770db5a0c8c7ab5fce7f7 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_1.conda#c6b2e7903121c3210462a0866a561993 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py311hd88b842_1.conda#f19feb9440890ccb806a367ea9ae0654 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py311ha74522f_3.conda#ad6dd0bed0cdf5f2d4eb2b989d6253b3 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py311h38be061_0.conda#8fd462c8bcbba5a3affcb2d04e387476 diff --git a/requirements/ci/nox.lock/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock similarity index 50% rename from requirements/ci/nox.lock/py39-linux-64.lock rename to requirements/locks/py39-linux-64.lock index 
cc896e6989..2467c06504 100644 --- a/requirements/ci/nox.lock/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,19 +1,18 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: de178c2d53980747bafc10c4a4387eeb8c700311af7b35a2fcb49f1b441b960b +# input_hash: bd8a57fefa94205701a278eab5fbfd897a3c49e389f926b1880d718caa0d6195 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.39-hcc3a1bd_1.conda#737be0d34c22d24432049ab7a3214de4 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e -https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda#51fc4fcfb19f5d95ffc8c339db5068e8 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 @@ -23,248 +22,251 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad -https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 
-https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 -https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f -https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268 +https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-h9c3ff4c_0.tar.bz2#c77f5e4e418fa47d699d6afa54c5d444 +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 -https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.14-h166bdaf_0.tar.bz2#fc84a0446e4e4fb882e78d786cfb9734 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.4-h166bdaf_0.tar.bz2#b4f717df2d377410b462328bf0e8fb7d +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f 
https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 -https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5 -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.1-h27087fc_0.tar.bz2#0af513b75f78a701a152568a31303bdf -https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.7-h0b41bf4_1.conda#7adaac6ff98219bcb99b45e408b80f4e +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-hd590300_3.conda#8f24d371ed9efb3f0b0de383fb81d51c https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 
https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.21-h583fa2b_2.conda#7b36a10b58964d4444fcba44244710c5 +https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-h3358134_0.conda#c164eb2e0df905571d68f40ae957522d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.31-h26416b9_0.tar.bz2#6c531bc30d49ae75b9c7c7f65bd62e3c +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.conda#cf4a8f520fdad3a63bb2bce74576cd2d 
https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad -https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.6-h63197d8_0.conda#201168ef66095bbd565e124ee2c56a20 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.1.0-hcb278e6_1.conda#d7a07b1f5974bce4735112aaef0c1467 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h82bc61c_0.conda#a01611c54334d783847879ee40109657 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.31-hbc51c84_0.tar.bz2#da9633eee814d4e910fe42643a356315 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.82-he02c5a1_0.conda#f8d7f11d19e4cb2207eab159fd4c0152 -https://conda.anaconda.org/conda-forge/linux-64/python-3.9.15-hba424b6_0_cpython.conda#7b9485fce17fac2dd4aca6117a9936c2 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 
-https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.4-h5cf9203_0.conda#7be3251c7b337e46bea0b8f3a3ed3c58 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_2.conda#20b4708cd04bdc8138d03314ddd97885 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h8ee46fc_1.conda#52d09ea80a42c0466214609ef0a2d62d +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6 +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2#a6cf47b09786423200d7982d1faa19eb 
+https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py39h3d6467e_0.conda#019c9509764e66c9d9d38b5ca365a9f4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py39hf3d152e_3.tar.bz2#3caf51fb6a259d377f05d6913193b11c -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py39hf3d152e_3.tar.bz2#4f0fa7459a1f40a969aaad418b1c428c +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.1-hc2a2eb6_0.tar.bz2#78415f0180a8d9c5bcc47889e00d5fb1 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.11.0-pyhd8ed1ab_0.tar.bz2#eb919f2119a6db5d0192f9e9c3711572 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.5.0-pyh1a96a4e_0.conda#20edd290b319aa0eff3e9055375756dc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 -https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.6-default_h3a83d3e_0.conda#535dd0ca1dcb165b6a8ffa10d01945fe 
+https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.87.0-hdc1c0ab_0.conda#bc302fa1cf8eda15c60f669b7524a320 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.1-hb675445_2.conda#509f08b3789d9e7e9a72871491ae08e2 -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.1-h409715c_0.conda#4b82f5c9fc26b31d0f9302773a657507 +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py39hb9d737c_2.tar.bz2#c678e07e7862b3157fb9f6d908233ffa -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py39h32b9844_0.tar.bz2#b035b507f55bb6a967d86d4b7e059437 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py39h72bdee0_0.conda#35514f5320206df9f4661c138c02e1c1 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.1-py39h223a676_0.conda#ce779b1c4e7ff4cc2f2690d173974daf +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.5-py39h3d75532_0.conda#ea5d332e361eb72c2593cf79559bc0ec https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea -https://conda.anaconda.org/conda-forge/noarch/packaging-22.0-pyhd8ed1ab_0.conda#0e8e1bd93998978fc3125522266d12db +https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py39hb9d737c_0.tar.bz2#12184951da572828fb986b06ffb63eed +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557 -https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7-pyhd8ed1ab_0.conda#c8d7e34ca76d6ecc03b84bedfd99d689 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 -https://conda.anaconda.org/conda-forge/noarch/setuptools-65.6.3-pyhd8ed1ab_0.conda#9600fc9524d3f821e6a6d58c52f5bf5a +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py39hb9d737c_1.tar.bz2#8a7d309b08cff6386fe384aa10dd3748 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py39hd1e30aa_0.conda#da334eecb1ea2248e28294c49e6f6d89 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.2-pyha770c72_0.conda#5a4a270e5a3f93846d6bade2f71fa440 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 
-https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/zipp-3.11.0-pyhd8ed1ab_0.conda#09b5b885341697137879a4f039a9e5a1 -https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0 +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 +https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.6-py39hf939315_0.tar.bz2#fb3f77fe25042c20c51974fcfe72f797 -https://conda.anaconda.org/conda-forge/linux-64/curl-7.87.0-hdc1c0ab_0.conda#b14123ca479b9473d7f7395b0fd25c97 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py39hb9d737c_1.tar.bz2#3f2d104f2fefdd5e8a205dd3aacbf1d7 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951 +https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.1-h409715c_0.conda#effaa9ea047f960bc70225be8337fd91 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.tar.bz2#eb31327ace8dac15c2df243d9505a132 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py39hd1e30aa_0.conda#80605b792f58cf5c78a5b7e20cef1e35 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 
+https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.12.0-pyhd8ed1ab_0.conda#e5fd2260a231ee63b6969f4801082f2b https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.6-default_h2e3cab8_0.conda#1b2cee49acc5b03c73ad0f68bfe04bb8 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572 +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.4-default_h1cdf331_0.conda#5bb4fde7a7ea23ea471b171561943aec +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c -https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py39ha08a7e4_0.conda#d62ba9d1a981544c809813afaf0be5c0 -https://conda.anaconda.org/conda-forge/noarch/pip-22.3.1-pyhd8ed1ab_0.tar.bz2#da66f2851b9836d3a7c5190082a45f7d -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.0-h8ffa02c_1.conda#ed901e1f5c504b144b31f015c6702634 -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-h126f2b6_0.tar.bz2#e4b74b33e13dd146e7d8b5078fc9ad30 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.0-pyhd8ed1ab_2.tar.bz2#ac82c7aebc282e6ac0450fca012ca78c +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py39haaeba84_1.conda#d7aa9b99ed6ade75fbab1e4cedcb3ce2 +https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py39h2ae25f5_3.tar.bz2#bcc7de3bb458a198b598ac1f75bf37e3 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py39h2ae25f5_2.tar.bz2#234ad9828eca1caf0f2fdcb4a24ad816 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.0-py39hc9151fd_0.conda#735b335f9250d84a5da94ffb76692db8 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.5-py39h5a03fae_0.conda#c3eb463691a8b93f1c381a9e56ecad9a -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026 
-https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py39hb9d737c_1005.tar.bz2#a639fdd9428d8b25f8326a3838d54045 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py39h2ae25f5_2.tar.bz2#b3b4aab96d1c4ed394d6f4b9146699d4 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.0-py39h079d5ae_0.conda#70ac60b214a8df9b9ce63e05af7d0976 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.12.1-pyhd8ed1ab_0.conda#f12878f9839c72f3d51af02fb10da43d -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.21.3-h25f0c4b_1.conda#0c8a8f15aa319c91d9010072278feddd -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.6.2-py39hf9fd14e_0.tar.bz2#78ce32061e0be12deb8e0f11ffb76906 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.2-py39h4661b88_0.conda#e17e50269c268d79478956a262a9fe13 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.6.2-pyhd8ed1ab_0.conda#0b4cc3f8181b0d8446eb5387d7848a54 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py39h12578bd_0.conda#7edbb99bec2bfab82f86abd71d24b505 -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h5a03fae_2.tar.bz2#306f1a018668f06a0bd89350a3f62c07 -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.1.0-pyhd8ed1ab_0.conda#e82f8fb903d7c4a59c77954759c341f9 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hf1c3bca_1.conda#ae6bfe65e81d9b59a71cc01a2858650f +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py39h3d6467e_0.conda#6d990f672cc70e5c480ddb74b789a17c +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.2-hd8ed1ab_0.conda#f676553904bb8f7c1dfe71c9db0d9ba7 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.12.0-pyhd8ed1ab_0.conda#3544c818f0720c89eb16ae6940ab440b +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.1-py39h40cae4c_1.conda#85bc4d45cd98f84af0c00435fff23f67 +https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py39h718ffca_1.conda#a19bf4be7ebce54623541fa4ad22abb4 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1 
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 +https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.21.3-h4243ec0_1.conda#905563d166c13ba299e39d6c9fcebd1c -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.12-pyhd8ed1ab_0.conda#a34dcea79b2bed9520682a07f80d1c0f -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_1.tar.bz2#0b69750bb937cab0db14f6bcef6fd787 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py39h94a714e_103.conda#ee29e7176b5854fa09ec17b101945401 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046 -https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.17.1-py39hf3d152e_0.conda#dd1be6ccb267f13bdc5c44cfb76c4080 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_102.conda#cbae8c932a9d2ee620db7ce7ae0abaf5 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.1-pyhd8ed1ab_0.conda#b90a2dec6d308d71649dbe58dc32c337 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py39he190548_0.conda#f2a931db797bb58bd335f4a857b4c898 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.3-nompi_py39h369ccc5_102.conda#dda035d195cf87b493bbb0aa296c381c +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb +https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b +https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 
https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.21.0-py39hf3d152e_0.conda#9dafac76ddd44f3b9a4a45ad601c5917 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.6-hf6cd601_5.conda#9c23a5205b67f2a67b19c84bf1fd7f5e -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.13-pyhd8ed1ab_0.conda#3078ef2359efd6ecadbc7e085c5e0592 -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py39h3088dd8_101.conda#e90e56e1bd5f2a484e435fd2745cd809 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.0.5-h2e5815a_0.conda#96bf06b24d74a5bf826485e9032c9312 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h18e9c17_2.tar.bz2#384809c51fb2adc04773f6fa097cd051 -https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2#089382ee0e2dc2eae33a04cc3c2bddb0 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.6.2-py39hf3d152e_0.tar.bz2#03225b4745d1dee7bb19d81e41c773a0 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429 -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py39h7360e5f_0.conda#d6d4f8195ec2c846deebe71306f60298 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h6e7ad6e_0.conda#7cb72bd5b1e7c5a23a062db90889356b +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h98fae49_1.conda#1cad58e8dceb1af51dbd963bb7f53f34 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py39h6183b62_3.conda#84c4007675da392fdb99faeefda69552 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_1.conda#c6b2e7903121c3210462a0866a561993 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h4bd5d67_1.conda#a60d65263a8ddbff5381ed91d4f6953e 
+https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h5c7b992_3.conda#19e30314fe824605750da905febb8ee6 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py39hf3d152e_0.conda#682772fa385911fb5efffbce21b269c5 diff --git a/requirements/ci/py310.yml b/requirements/py310.yml similarity index 78% rename from requirements/ci/py310.yml rename to requirements/py310.yml index d79015c055..2ba8abb7ae 100644 --- a/requirements/ci/py310.yml +++ b/requirements/py310.yml @@ -14,10 +14,11 @@ dependencies: - cartopy >=0.21 - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2.26 + - dask-core >=2022.9.0 + - libnetcdf !=4.9.1 - matplotlib >=3.5 - - netcdf4 <1.6.1 - - numpy >=1.19 + - netcdf4 + - numpy >=1.21, !=1.24.3 - python-xxhash - pyproj - scipy @@ -34,21 +35,23 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit - psutil - pytest + - pytest-cov - pytest-xdist - requests # Documentation dependencies. - - sphinx - - sphinxcontrib-napoleon + - sphinx <=5.3 + - sphinxcontrib-apidoc - sphinx-copybutton - sphinx-gallery >=0.11.0 - - sphinx-panels - - pydata-sphinx-theme + - sphinx-design + - pydata-sphinx-theme >=0.13.0 # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/ci/py38.yml b/requirements/py311.yml similarity index 76% rename from requirements/ci/py38.yml rename to requirements/py311.yml index b68e8ccf45..80e112d850 100644 --- a/requirements/ci/py38.yml +++ b/requirements/py311.yml @@ -4,7 +4,7 @@ channels: - conda-forge dependencies: - - python =3.8 + - python =3.11 # Setup dependencies. - setuptools >=64 @@ -14,10 +14,11 @@ dependencies: - cartopy >=0.21 - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2.26 + - dask-core >=2022.9.0 + - libnetcdf !=4.9.1 - matplotlib >=3.5 - - netcdf4 <1.6.1 - - numpy >=1.19 + - netcdf4 + - numpy >=1.21, !=1.24.3 - python-xxhash - pyproj - scipy @@ -34,21 +35,23 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit - psutil - pytest + - pytest-cov - pytest-xdist - requests # Documentation dependencies. - - sphinx - - sphinxcontrib-napoleon + - sphinx <=5.3 + - sphinxcontrib-apidoc - sphinx-copybutton - sphinx-gallery >=0.11.0 - - sphinx-panels - - pydata-sphinx-theme + - sphinx-design + - pydata-sphinx-theme >=0.13.0 # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/ci/py39.yml b/requirements/py39.yml similarity index 79% rename from requirements/ci/py39.yml rename to requirements/py39.yml index 9fec76cfde..ed6a5eda54 100644 --- a/requirements/ci/py39.yml +++ b/requirements/py39.yml @@ -14,10 +14,11 @@ dependencies: - cartopy >=0.21 - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2.26 + - dask-core >=2022.9.0 + - libnetcdf !=4.9.1 - matplotlib >=3.5 - - netcdf4 <1.6.1 - - numpy >=1.19 + - netcdf4 + - numpy >=1.21, !=1.24.3 - python-xxhash - pyproj - scipy @@ -34,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit @@ -43,12 +45,12 @@ dependencies: - requests # Documentation dependencies. 
- - sphinx - - sphinxcontrib-napoleon + - sphinx <=5.3 + - sphinxcontrib-apidoc - sphinx-copybutton - sphinx-gallery >=0.11.0 - - sphinx-panels - - pydata-sphinx-theme + - sphinx-design + - pydata-sphinx-theme >=0.13.0 # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/pypi-core.txt b/requirements/pypi-core.txt new file mode 100644 index 0000000000..7937f73b4f --- /dev/null +++ b/requirements/pypi-core.txt @@ -0,0 +1,12 @@ +cartopy>=0.21 +cf-units>=3.1 +cftime>=1.5.0 +dask[array]>=2022.9.0 +# libnetcdf!=4.9.1 (not available on PyPI) +matplotlib>=3.5 +netcdf4 +numpy>=1.21,!=1.24.3 +pyproj +scipy +shapely!=1.8.3 +xxhash \ No newline at end of file diff --git a/requirements/ci/readthedocs.yml b/requirements/readthedocs.yml similarity index 100% rename from requirements/ci/readthedocs.yml rename to requirements/readthedocs.yml diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 75647e6623..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,141 +0,0 @@ -[metadata] -author = SciTools Developers -author_email = scitools.pub@gmail.com -classifiers = - Development Status :: 5 - Production/Stable - Intended Audience :: Science/Research - License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+) - Operating System :: MacOS - Operating System :: POSIX - Operating System :: POSIX :: Linux - Operating System :: Unix - Programming Language :: Python - Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: Implementation :: CPython - Topic :: Scientific/Engineering - Topic :: Scientific/Engineering :: Atmospheric Science - Topic :: Scientific/Engineering :: Visualization -description = A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data -download_url = https://github.com/SciTools/iris -keywords = - cf-metadata - data-analysis - earth-science - grib - netcdf - meteorology - oceanography - space-weather - ugrid - visualisation -license = LGPL-3.0-or-later -license_files = COPYING.LESSER -long_description = file: README.md -long_description_content_type = text/markdown -name = scitools-iris -project_urls = - Code = https://github.com/SciTools/iris - Discussions = https://github.com/SciTools/iris/discussions - Documentation = https://scitools-iris.readthedocs.io/en/stable/ - Issues = https://github.com/SciTools/iris/issues -url = https://github.com/SciTools/iris -version = attr: iris.__version__ - -[options] -include_package_data = True -install_requires = - cartopy>=0.21 - cf-units>=3.1 - cftime>=1.5.0 - dask[array]>=2.26 - matplotlib>=3.5 - netcdf4<1.6.1 - numpy>=1.19 - scipy - shapely!=1.8.3 - xxhash -packages = find_namespace: -package_dir = - =lib -python_requires = - >=3.8 -zip_safe = False - -[options.packages.find] -where = lib - -[options.extras_require] -docs = - sphinx<5 - sphinx-copybutton - sphinx-gallery>=0.11.0 - sphinx_rtd_theme - sphinxcontrib-napoleon - sphinx-panels -test = - filelock - imagehash>=4.0 - pre-commit - requests - pytest - pytest-xdist -all = - mo_pack - nc-time-axis>=1.4 - pandas - stratify - %(docs)s - %(test)s - -[flake8] -# References: -# https://flake8.readthedocs.io/en/latest/user/configuration.html -# https://flake8.readthedocs.io/en/latest/user/error-codes.html -# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes - -max-line-length = 80 -max-complexity = 50 -select = 
C,E,F,W,B,B950 -ignore = - # E203: whitespace before ':' - E203, - # E226: missing whitespace around arithmetic operator - E226, - # E231: missing whitespace after ',', ';', or ':' - E231, - # E402: module level imports on one line - E402, - # E501: line too long - E501, - # E731: do not assign a lambda expression, use a def - E731, - # W503: line break before binary operator - W503, - # W504: line break after binary operator - W504, -exclude = - # - # ignore the following directories - # - .eggs, - build, - docs/src/sphinxext/*, - tools/*, - benchmarks/*, - # - # ignore auto-generated files - # - _ff_cross_refrences.py, - std_names.py, - um_cf_map.py, - # - # ignore third-party files - # - gitwash_dumper.py, - # - # convenience imports - # - lib/iris/common/__init__.py diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py new file mode 100755 index 0000000000..5d7dd2abf2 --- /dev/null +++ b/tools/release_do_nothing.py @@ -0,0 +1,828 @@ +#!/usr/bin/env python3 +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +A do-nothing script to hand-hold through the Iris release process. + +https://blog.danslimmon.com/2019/07/15/do-nothing-scripting-the-key-to-gradual-automation/ + +""" +from datetime import datetime +from enum import Enum +from pathlib import Path +import re +from sys import stderr +from time import sleep +import typing + + +class ReleaseTypes(Enum): + MAJOR = 0 + MINOR = 1 + PATCH = 2 + + +valid_release_types = typing.Literal["major", "minor", "patch"] + + +class ReleaseStrings: + """An easy way to pass the various flavours of release string between functions.""" + + def __init__(self, input_tag: str): + version_mask = r"v\d+\.\d+\.\d+\D*.*" + regex_101 = "https://regex101.com/r/dLVaNH/1" + if re.fullmatch(version_mask, input_tag) is None: + message = ( + "Release tag does not match the input mask:\n" + f"{version_mask}\n" + f"({regex_101})" + ) + raise ValueError(message) + else: + self.tag = input_tag # v1.2.3rc0 + + self.series = ".".join(self.tag.split(".")[:2]) # v1.2 + self.branch = self.series + ".x" # v1.2.x + self.release = self.tag[1:] # 1.2.3rc0 + + +class WhatsNewRsts: + """An easy way to pass the paths of various What's New files between functions.""" + + def __init__(self, release_strings: ReleaseStrings): + src_dir = Path(__file__).parents[1] / "docs" / "src" + whatsnew_dir = src_dir / "whatsnew" + assert whatsnew_dir.is_dir() + + self.latest = whatsnew_dir / "latest.rst" + self.release = whatsnew_dir / (release_strings.series[1:] + ".rst") + self.index = whatsnew_dir / "index.rst" + self.template = self.latest.with_suffix(".rst.template") + + +def _break_print(message: str): + print() + print(message) + # Help with flow/visibility by waiting 1secs before proceeding. + sleep(1) + + +def _mark_section(section_number: int): + _break_print(f"SECTION {section_number} ...") + + +def _get_input(message: str, expected_inputs: str) -> str: + _break_print(message) + return input(expected_inputs + " : ") + + +def _wait_for_done(message: str): + _break_print(message) + done = False + while not done: + done = ( + input("Step complete? y / [n] : ").casefold() == "y".casefold() + ) + + +def _report_problem(message: str): + print(message, file=stderr) + # To ensure correct sequencing of messages. 
+ sleep(0.5) + + +def get_release_type() -> ReleaseTypes: + release_type = None + release_types_str = " ".join( + [f"{m.name}={m.value}" for m in ReleaseTypes.__members__.values()] + ) + message = "What type of release are you preparing?\nhttps://semver.org/" + while release_type is None: + input_type = _get_input(message, release_types_str) + try: + release_type = ReleaseTypes(int(input_type)) + except ValueError: + _report_problem("Invalid release type. Please try again ...") + _break_print(f"{release_type} confirmed.") + return release_type + + +def get_release_tag() -> ReleaseStrings: + # TODO: automate using setuptools_scm. + release_strings = None + message = ( + "Input the release tag you are creating today, including any release " + "candidate suffix.\n" + "https://semver.org/\n" + "https://scitools-iris.readthedocs.io/en/latest/developers_guide/release.html?highlight=candidate#release-candidate" + ) + while release_strings is None: + input_tag = _get_input(message, "e.g. v1.2.3rc0") + try: + release_strings = ReleaseStrings(input_tag) + except ValueError as err: + _report_problem(str(err)) + return release_strings + + +def check_release_candidate( + release_type: ReleaseTypes, release_strings: ReleaseStrings +) -> bool: + is_release_candidate = "rc" in release_strings.tag + + message = "Checking tag for release candidate: " + if is_release_candidate: + message += "DETECTED\nThis IS a release candidate." + else: + message += "NOT DETECTED\nThis IS NOT a release candidate." + _break_print(message) + + if release_type == ReleaseTypes.PATCH and is_release_candidate: + message = ( + "Release candidates are not expected for PATCH releases. " + "Are you sure you want to continue?" + ) + if _get_input(message, "y / [n]").casefold() != "y".casefold(): + exit() + return is_release_candidate + + +def check_first_in_series( + release_type: ReleaseTypes, + release_strings: ReleaseStrings, + is_release_candidate: bool, +) -> bool: + first_in_series = False + if release_type != ReleaseTypes.PATCH: + message = ( + "Have there been any prior releases in the " + f"{release_strings.series} series, including release candidates?" + ) + first_in_series = ( + _get_input(message, "[y] / n").casefold() == "n".casefold() + ) + if first_in_series: + _break_print("First in series confirmed.") + if not is_release_candidate: + message = ( + "The first release in a series is expected to be a " + "release candidate, but this is not. Are you sure you " + "want to continue?" 
+ ) + if _get_input(message, "y / [n]").casefold() != "y".casefold(): + exit() + else: + _break_print("Existing series confirmed.") + return first_in_series + + +def update_standard_names(first_in_series: bool) -> None: + if first_in_series: + message = ( + "Update the file ``etc/cf-standard-name-table.xml`` to the latest CF " + "standard names, via a new Pull Request.\n" + "(This is used during build to automatically generate the sourcefile " + "``lib/iris/std_names.py``).\n" + "Latest standard names:\n" + 'wget "http://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml";' + ) + _wait_for_done(message) + + +def check_deprecations(release_type: ReleaseTypes) -> None: + if release_type == ReleaseTypes.MAJOR: + message = ( + "This is a MAJOR release - be sure to finalise all deprecations " + "and FUTUREs from previous releases, via a new Pull Request.\n" + "https://scitools-iris.readthedocs.io/en/latest/developers_guide/contributing_deprecations.html" + ) + _wait_for_done(message) + + +def _delete_local_branch(branch_name: str): + message = ( + "Before the next step, avoid a name clash by deleting any " + "existing local branch, if one exists.\n" + f"git branch -D {branch_name};\n" + f"git push -d origin {branch_name};" + ) + _wait_for_done(message) + + +def create_release_branch( + release_strings: ReleaseStrings, first_in_series: bool +) -> None: + # TODO: automate + + _break_print("Release branch management ...") + + if first_in_series: + message = ( + "Visit https://github.com/SciTools/iris and create the" + f"``{release_strings.branch}`` release branch from ``main``." + ) + _wait_for_done(message) + + else: + message = ( + "Cherry-pick any specific commits that are needed from ``main`` " + f"onto {release_strings.branch} , to get the CI passing.\n" + "E.g. a new dependency pin may have been introduced since " + f"{release_strings.branch} was last updated from ``main``.\n" + "DO NOT squash-merge - want to preserve the original commit SHA's." + ) + _wait_for_done(message) + + +def finalise_whats_new( + release_type: ReleaseTypes, + release_strings: ReleaseStrings, + is_release_candidate: bool, + first_in_series: bool, +) -> WhatsNewRsts: + _break_print("What's New finalisation ...") + + working_branch = release_strings.branch + ".updates" + _delete_local_branch(working_branch) + message = ( + f"Checkout a local branch from the official {release_strings.branch} branch.\n" + "git fetch upstream;\n" + f"git checkout upstream/{release_strings.branch} -b " + f"{working_branch};" + ) + _wait_for_done(message) + + rsts = WhatsNewRsts(release_strings) + + # TODO: automate + if first_in_series: + message = ( + "'Cut' the What's New for the release.\n" + f"git mv {rsts.latest.absolute()} {rsts.release.absolute()};" + ) + _wait_for_done(message) + + message = ( + f"In {rsts.index.absolute()}:\n" + f"Replace references to {rsts.latest.name} with {rsts.release.name}" + ) + _wait_for_done(message) + + _break_print(f"What's New file path = {rsts.release}") + + if not release_type == ReleaseTypes.PATCH: + whatsnew_title = f"{release_strings.series} ({datetime.today().strftime('%d %b %Y')})" + if is_release_candidate: + whatsnew_title += " [release candidate]" + # TODO: automate + message = f"In {rsts.release.name}: set the page title to:\n{whatsnew_title}\n" + _wait_for_done(message) + + message = ( + f"In {rsts.release.name}: ensure the page title underline is " + "the exact same length as the page title text." 
+ ) + _wait_for_done(message) + + dropdown_title = f"\n{release_strings.series} Release Highlights\n" + message = ( + f"In {rsts.release.name}: set the sphinx-design dropdown title to:{dropdown_title}" + ) + _wait_for_done(message) + + message = ( + f"Review {rsts.release.name} to ensure it is a good reflection of " + f"what is new in {release_strings.series}." + ) + _wait_for_done(message) + + message = ( + "Work with the development team to populate the Release " + f"Highlights dropdown section at the top of {rsts.release.name}." + ) + _wait_for_done(message) + + else: + message = ( + "Create a patch dropdown section at the top of " + f"{rsts.release.name}.\n" + f"See {rsts.template} for how this should be written." + ) + _wait_for_done(message) + + if first_in_series: + # TODO: automate + message = ( + "Remove the What's New template file.\n" + f"git rm {rsts.template.absolute()};" + ) + _wait_for_done(message) + + message = ( + "Commit and push all the What's New changes.\n" + f'git commit -am "Whats new updates for {release_strings.tag} .";\n' + f"git push -u origin {working_branch};" + ) + _wait_for_done(message) + + message = ( + f"Follow the Pull Request process to get {working_branch} " + f"merged into upstream/{release_strings.branch} .\n" + "Make sure the documentation is previewed during this process." + ) + _wait_for_done(message) + + return rsts + + +def cut_release( + release_strings: ReleaseStrings, is_release_candidate: bool +) -> None: + _break_print("The release ...") + + message = ( + "Visit https://github.com/SciTools/iris/releases/new to open " + "a blank new-release web page." + ) + _wait_for_done(message) + + message = ( + f"Select {release_strings.branch} as the Target.\n" + f"Input {release_strings.tag} as the new tag to create, and also as " + "the Release title." + ) + _wait_for_done(message) + + message = ( + "Craft an appropriate release description in the main text box.\n" + "Be sure to mention the What's New entry, conda-forge and PyPI - you " + "will need to return later to make these into links.\n" + "Be careful to change the appropriate words if copying from a " + "previous release description." + ) + _wait_for_done(message) + + if is_release_candidate: + message = ( + "This is a release candidate - include the following instructions " + "for installing with conda or pip:\n" + f"conda install -c conda-forge/label/rc_iris iris={release_strings.release}\n" + f"pip install scitools-iris=={release_strings.release}" + ) + _wait_for_done(message) + + message = ( + "This is a release candidate - tick the box to set this as a " + "pre-release." + ) + _wait_for_done(message) + + else: + message = "Tick the box to set this as the latest release." + _wait_for_done(message) + + message = "Click: Publish release !" + _wait_for_done(message) + + message = ( + "The CI will now run against this new tag, including automatically " + "publishing to PyPI." + ) + _break_print(message) + + url = "https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml" + message = ( + f"Visit {url} to monitor the building, testing and publishing of " + "the Iris sdist and binary wheel to PyPI." + ) + _wait_for_done(message) + + +def check_rtd( + release_strings: ReleaseStrings, is_release_candidate: bool +) -> None: + _break_print("Read the Docs checks ...") + + message = ( + "Visit https://readthedocs.org/projects/scitools-iris/versions/ and " + "make sure you are logged in." + ) + _wait_for_done(message) + + message = f"Set {release_strings.tag} to Active, un-Hidden." 
+ _wait_for_done(message) + + message = f"Set {release_strings.branch} to Active, Hidden." + _wait_for_done(message) + + message = ( + "Keep the latest 2 branch doc builds active - those formatted 0.0.x - " + "deactivate older ones." + ) + _wait_for_done(message) + + message = ( + f"Visit https://scitools-iris.readthedocs.io/en/{release_strings.tag} " + "to confirm:\n\n" + "- The docs have rendered.\n" + f"- The version badge in the top left reads: {release_strings.tag} .\n" + " (this demonstrates that setuptools_scm has worked correctly).\n" + "- The What's New looks correct.\n" + f"- {release_strings.tag} is available in RTD's version switcher.\n\n" + "NOTE: the docs can take several minutes to finish building." + ) + if not is_release_candidate: + message += ( + "- Selecting 'stable' in the version switcher also brings up the " + f"{release_strings.tag} render." + ) + _wait_for_done(message) + + message = ( + f"Visit https://scitools-iris.readthedocs.io/en/{release_strings.branch} " + "to confirm:\n\n" + "- The docs have rendered\n" + f"- The version badge in the top left includes: {release_strings.branch} .\n" + f"- {release_strings.branch} is NOT available in RTD's version switcher.\n\n" + "NOTE: the docs can take several minutes to finish building." + ) + _wait_for_done(message) + + +def check_pypi( + release_strings: ReleaseStrings, is_release_candidate: bool +) -> str: + _break_print("PyPI checks ...") + _break_print("If anything goes wrong, manual steps are in the documentation.") + + message = ( + "Confirm that the following URL is correctly populated:\n" + f"https://pypi.org/project/scitools-iris/{release_strings.release}/" + ) + _wait_for_done(message) + + message = ( + f"Confirm that {release_strings.release} is at the top of this page:\n" + "https://pypi.org/project/scitools-iris/#history" + ) + _wait_for_done(message) + + if is_release_candidate: + message = ( + f"Confirm that {release_strings.release} is marked as a " + f"pre-release on this page:\n" + "https://pypi.org/project/scitools-iris/#history" + ) + else: + message = ( + f"Confirm that {release_strings.release} is the tag shown on the " + "scitools-iris PyPI homepage:\n" + "https://pypi.org/project/scitools-iris/" + ) + _wait_for_done(message) + + message = ( + f"Visit the below and click `view hashes` for the Source Distribution" + f"(`.tar.gz`):\n" + f"https://pypi.org/project/scitools-iris/{release_strings.release}#files\n" + ) + sha256 = _get_input(message, "Input the SHA256 hash") + + message = ( + "Confirm that pip install works as expected:\n" + f"pip install scitools-iris=={release_strings.release};" + ) + _wait_for_done(message) + + return sha256 + + +def update_conda_forge( + release_strings: ReleaseStrings, is_release_candidate: bool, sha256: str +) -> None: + _break_print("conda-forge updates ...") + + if not is_release_candidate: + message = ( + "NOTE: after several hours conda-forge automation will create a " + "Pull Request against conda-forge/iris-feedstock (via the " + "regro-cf-autotick-bot). Quicker to sort it now, manually ..." + ) + _break_print(message) + + message = ( + "Make sure you have a GitHub fork of:\n" + "https://github.com/conda-forge/iris-feedstock" + ) + _wait_for_done(message) + + message = ( + "Make sure you have a local clone of your iris-feedstock fork.\n" + "`cd` into your clone." 
+ ) + _wait_for_done(message) + + if is_release_candidate: + upstream_branch = "release-candidate" + else: + upstream_branch = "main" + + # TODO: automate + message = ( + "Checkout a new branch for the conda-forge changes for this release:\n" + "git fetch upstream;\n" + f"git checkout upstream/{upstream_branch} -b {release_strings.tag};\n" + ) + _wait_for_done(message) + + message = ( + "Update ./recipe/meta.yaml:\n\n" + f"- The version at the very top of the file: {release_strings.release}\n" + f"- The sha256 hash: {sha256}\n" + "- Requirements: align the packages and pins with those in the Iris repo\n" + "- Maintainers: update with any changes to the dev team\n" + "- MAKE SURE everything else is correct - plenty of other things " + "might need one-off changes.\n" + ) + _wait_for_done(message) + + # TODO: automate + message = ( + "No other file normally needs changing in iris-feedstock, so push up " + "the changes to prepare for a Pull Request:\n" + f'git commit -am "Recipe updates for {release_strings.tag} .";\n' + f"git push -u origin {release_strings.tag};" + ) + _wait_for_done(message) + + message = ( + f"Follow the Pull Request process to get {release_strings.tag} branch " + f"merged into upstream/{upstream_branch} .\n" + "Specific conda-forge guidance will be automatically given once the " + "PR is created." + ) + _wait_for_done(message) + + message = ( + f"Confirm that {release_strings.release} appears in this list:\n" + "https://anaconda.org/conda-forge/iris/files" + ) + _wait_for_done(message) + + if not is_release_candidate: + message = ( + f"Confirm that {release_strings.release} is displayed on this " + "page as the latest available:\n" + "https://anaconda.org/conda-forge/iris" + ) + _wait_for_done(message) + + if is_release_candidate: + channel_command = " -c conda-forge/label/rc_iris " + else: + channel_command = " " + message = ( + "Confirm that conda (or mamba) install works as expected:\n" + f"conda create -n tmp_iris{channel_command}iris={release_strings.release};\n" + f"conda remove -n tmp_iris --all;" + ) + _wait_for_done(message) + + +def update_links(release_strings: ReleaseStrings) -> None: + _break_print("Link updates ...") + + message = ( + "Revisit the GitHub release:\n" + f"https://github.com/SciTools/iris/releases/tag/{release_strings.tag}\n" + "You have confirmed that Read the Docs, PyPI and conda-forge have all " + "updated correctly. Include the following links in the release " + "notes:\n\n" + f"https://scitools-iris.readthedocs.io/en/{release_strings.tag}/\n" + f"https://pypi.org/project/scitools-iris/{release_strings.release}/\n" + f"https://anaconda.org/conda-forge/iris?version={release_strings.release}\n" + ) + _wait_for_done(message) + + message = ( + "Update the release page in GitHub discussions, with the above links " + "and anything else appropriate.\n" + "https://github.com/SciTools/iris/discussions" + ) + _wait_for_done(message) + + +def twitter_announce( + release_strings: ReleaseStrings, first_in_series: bool +) -> None: + message = ( + "Announce the release via https://twitter.com/scitools_iris, and any " + "other appropriate message boards (e.g. Yammer).\n" + "Any content used for the announcement should be stored in the " + "SciTools/twitter-scitools-iris GitHub repo.\n" + ) + if not first_in_series: + message += ( + f"Consider replying within an existing {release_strings.series} " + "announcement thread, if appropriate." 
+ ) + _wait_for_done(message) + + +def update_citation( + release_strings: ReleaseStrings, is_release_candidate: bool +) -> None: + if not is_release_candidate: + src_dir = Path(__file__).parents[1] / "docs" / "src" + citation_rst = src_dir / "userguide" / "citation.rst" + assert citation_rst.is_file() + message = ( + f"Follow the Pull Request process to update {citation_rst.name} " + "with the correct dates, DOI and version string for " + f"{release_strings.tag}.\n" + f"{citation_rst.absolute()}\n\n" + f"The PR should target {release_strings.branch} (prior to merge-back)." + ) + _wait_for_done(message) + + +def merge_back( + release_strings: ReleaseStrings, first_in_series: bool, rsts: WhatsNewRsts +) -> None: + _break_print("Branch merge-back ...") + + merge_commit = ( + "BE SURE TO MERGE VIA A MERGE-COMMIT (not a squash-commit), to " + "preserve the commit SHA's." + ) + + if first_in_series: + # TODO: automate + + working_branch = release_strings.branch + ".mergeback" + _delete_local_branch(working_branch) + message = ( + "Checkout a local branch from the official ``main`` branch.\n" + "git fetch upstream;\n" + f"git checkout upstream/main -b {working_branch};" + ) + _wait_for_done(message) + + message = ( + f"Merge in the commits from {release_strings.branch}.\n" + f"{merge_commit}\n" + f"git merge upstream/{release_strings.branch} --no-ff " + '-m "Merging release branch into main";' + ) + _wait_for_done(message) + + message = ( + "Recreate the following files, which are present in ``main``, but " + f"are currently deleted from {working_branch}:\n" + f"{rsts.latest.absolute()}\n" + f"{rsts.template.absolute()}\n" + "THEN:\n" + f"git add {rsts.latest.absolute()};\n" + f"git add {rsts.template.absolute()};\n" + ) + _wait_for_done(message) + + message = ( + f"In {rsts.index.absolute()}:\n" + f"Add {rsts.latest.name} to the top of the list of .rst files, " + f"and set the top include:: to be {rsts.latest.name} ." + ) + _wait_for_done(message) + + message = ( + "Commit and push all the What's New changes.\n" + 'git commit -am "Restore latest Whats New files.";\n' + f"git push -u origin {working_branch};" + ) + _wait_for_done(message) + + message = ( + "Follow the Pull Request process to get " + f"{working_branch} merged into upstream/main .\n" + "Make sure the documentation is previewed during this process.\n" + f"{merge_commit}" + ) + _wait_for_done(message) + + else: + message = ( + f"Propose a merge-back from {release_strings.branch} into ``main`` by " + f"visiting this URL and clicking `Create pull request`:\n" + f"https://github.com/SciTools/iris/compare/main...{release_strings.branch}\n" + f"{merge_commit}" + ) + _wait_for_done(message) + message = ( + f"Once the pull request is merged ensure that the {release_strings.branch} " + "release branch is restored.\n" + "GitHub automation rules may have automatically deleted the release branch." 
+ ) + _wait_for_done(message) + + +def main(): + _mark_section(1) + release_type = get_release_type() + + _mark_section(2) + release_strings = get_release_tag() + + _mark_section(3) + is_release_candidate = check_release_candidate( + release_type, + release_strings, + ) + + _mark_section(4) + is_first_in_series = check_first_in_series( + release_type, + release_strings, + is_release_candidate, + ) + + _mark_section(5) + update_standard_names( + is_first_in_series, + ) + + _mark_section(6) + check_deprecations( + release_type, + ) + + _mark_section(7) + create_release_branch( + release_strings, + is_first_in_series, + ) + + _mark_section(8) + whats_new_rsts = finalise_whats_new( + release_type, + release_strings, + is_release_candidate, + is_first_in_series, + ) + + _mark_section(9) + cut_release( + release_strings, + is_release_candidate, + ) + + _mark_section(10) + check_rtd( + release_strings, + is_release_candidate, + ) + + _mark_section(11) + sha256 = check_pypi( + release_strings, + is_release_candidate, + ) + + _mark_section(12) + update_conda_forge( + release_strings, + is_release_candidate, + sha256, + ) + + _mark_section(13) + update_links( + release_strings, + ) + + _mark_section(14) + twitter_announce( + release_strings, + is_first_in_series, + ) + + _mark_section(15) + update_citation( + release_strings, + is_release_candidate, + ) + + _mark_section(16) + merge_back( + release_strings, + is_first_in_series, + whats_new_rsts, + ) + + _break_print("RELEASE COMPLETE. Congratulations! 🎉") + + +if __name__ == "__main__": + main() diff --git a/tools/update_lockfiles.py b/tools/update_lockfiles.py index dc898784ae..073f86cda6 100755 --- a/tools/update_lockfiles.py +++ b/tools/update_lockfiles.py @@ -8,7 +8,7 @@ that nox uses for testing each different supported version of python. Typical usage: - python tools/update_lockfiles.py -o requirements/ci/nox.lock requirements/ci/py*.yml + python tools/update_lockfiles.py -o requirements/locks requirements/py*.yml """ @@ -54,7 +54,7 @@ fname = '.'.join(fname.split('.')[:-1]) # conda-lock --filename-template expects a string with a "...{platform}..." - # placeholder in it, so we have to build the .lock filname without + # placeholder in it, so we have to build the .lock filename without # using .format ofile_template = Path(args.output_dir) / (fname+'-{platform}.lock') subprocess.call([