diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index b197b58e80..9ae3534c76 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -6,6 +6,12 @@ on:
schedule:
# Runs every day at 23:00.
- cron: "0 23 * * *"
+ workflow_dispatch:
+ inputs:
+ first_commit:
+ description: "Argument to be passed to the overnight benchmark script."
+ required: false
+ type: string
jobs:
benchmark:
@@ -15,7 +21,7 @@ jobs:
env:
IRIS_TEST_DATA_LOC_PATH: benchmarks
IRIS_TEST_DATA_PATH: benchmarks/iris-test-data
- IRIS_TEST_DATA_VERSION: "2.15"
+ IRIS_TEST_DATA_VERSION: "2.18"
# Lets us manually bump the cache to rebuild
ENV_CACHE_BUILD: "0"
TEST_DATA_CACHE_BUILD: "2"
@@ -64,7 +70,12 @@ jobs:
- name: Run overnight benchmarks
run: |
- first_commit=$(git log --after="$(date -d "1 day ago" +"%Y-%m-%d") 23:00:00" --pretty=format:"%h" | tail -n 1)
+ first_commit=${{ inputs.first_commit }}
+ if [ "$first_commit" == "" ]
+ then
+ first_commit=$(git log --after="$(date -d "1 day ago" +"%Y-%m-%d") 23:00:00" --pretty=format:"%h" | tail -n 1)
+ fi
+
if [ "$first_commit" != "" ]
then
nox --session="benchmarks(overnight)" -- $first_commit
diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml
index 43bacd3ec5..cee98dc33d 100644
--- a/.github/workflows/ci-tests.yml
+++ b/.github/workflows/ci-tests.yml
@@ -46,7 +46,7 @@ jobs:
session: "tests"
env:
- IRIS_TEST_DATA_VERSION: "2.16"
+ IRIS_TEST_DATA_VERSION: "2.18"
ENV_NAME: "ci-tests"
steps:
diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml
index a45cccfeab..94c20aedb9 100644
--- a/.github/workflows/refresh-lockfiles.yml
+++ b/.github/workflows/refresh-lockfiles.yml
@@ -1,13 +1,5 @@
-# This workflow periodically creates new environment lock files based on the newest
-# available packages and dependencies.
-#
-# Environment specifications are given as conda environment.yml files found in
-# `requirements/ci/py**.yml`. These state the packages required, the conda channels
-# that the packages will be pulled from, and any versions of packages that need to be
-# pinned at specific versions.
-#
-# For environments that have changed, a pull request will be made and submitted
-# to the main branch
+# Updates the environment lock files. See the called workflow in the
+# scitools/workflows repo for more details.
name: Refresh Lockfiles
@@ -20,98 +12,7 @@ on:
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
- cron: "1 0 * * 6"
-
jobs:
-
- get_python_matrix:
- # Determines which Python versions should be included in the matrix used in
- # the gen_lockfiles job.
- if: "github.repository == 'SciTools/iris'"
- runs-on: ubuntu-latest
- outputs:
- matrix: ${{ steps.get_py.outputs.matrix }}
- steps:
- - uses: actions/checkout@v3
- - id: get_py
- run: echo "::set-output name=matrix::$(ls -1 requirements/ci/py*.yml | xargs -n1 basename | sed 's/....$//' | jq -cnR '[inputs]')"
-
- gen_lockfiles:
- # this is a matrix job: it splits to create new lockfiles for each
- # of the CI test python versions.
- if: "github.repository == 'SciTools/iris'"
- runs-on: ubuntu-latest
- needs: get_python_matrix
-
- strategy:
- matrix:
- python: ${{ fromJSON(needs.get_python_matrix.outputs.matrix) }}
-
- steps:
- - uses: actions/checkout@v3
- - name: install requirements
- run: |
- source $CONDA/bin/activate base
- conda install -y -c conda-forge conda-libmamba-solver conda-lock
- - name: generate lockfile
- env:
- CONDA_EXPERIMENTAL_SOLVER: libmamba
- run: |
- $CONDA/bin/conda-lock lock -k explicit -p linux-64 -f requirements/ci/${{matrix.python}}.yml
- mv conda-linux-64.lock ${{matrix.python}}-linux-64.lock
- - name: output lockfile
- uses: actions/upload-artifact@v3
- with:
- path: ${{matrix.python}}-linux-64.lock
-
- create_pr:
- # once the matrix job has completed all the lock files will have been uploaded as artifacts.
- # Download the artifacts, add them to the repo, and create a PR.
- if: "github.repository == 'SciTools/iris'"
- runs-on: ubuntu-latest
- needs: gen_lockfiles
-
- steps:
- - uses: actions/checkout@v3
- - name: get artifacts
- uses: actions/download-artifact@v3
- with:
- path: artifacts
-
- - name: Update lock files in repo
- run: |
- cp artifacts/artifact/*.lock requirements/ci/nox.lock
- rm -r artifacts
-
- - name: "Generate token"
- uses: tibdex/github-app-token@v1
- id: generate-token
- with:
- app_id: ${{ secrets.AUTH_APP_ID }}
- private_key: ${{ secrets.AUTH_APP_PRIVATE_KEY }}
-
- - name: Create Pull Request
- id: cpr
- uses: peter-evans/create-pull-request@671dc9c9e0c2d73f07fa45a3eb0220e1622f0c5f
- with:
- token: ${{ steps.generate-token.outputs.token }}
- commit-message: Updated environment lockfiles
- committer: "Lockfile bot "
- author: "Lockfile bot "
- delete-branch: true
- branch: auto-update-lockfiles
- title: "[iris.ci] environment lockfiles auto-update"
- body: |
- Lockfiles updated to the latest resolvable environment.
-
- ### If the CI tasks fail, create a new branch based on this PR and add the required fixes to that branch.
- labels: |
- New: Pull Request
- Bot
-
- - name: Check Pull Request
- if: steps.cpr.outputs.pull-request-number != ''
- run: |
- echo "pull-request #${{ steps.cpr.outputs.pull-request-number }}"
- echo "pull-request URL ${{ steps.cpr.outputs.pull-request-url }}"
- echo "pull-request operation [${{ steps.cpr.outputs.pull-request-operation }}]"
- echo "pull-request head SHA ${{ steps.cpr.outputs.pull-request-head-sha }}"
+ refresh_lockfiles:
+ uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@main
+ secrets: inherit
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index c65f37284f..44b77e5c7d 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -14,7 +14,7 @@ jobs:
if: "github.repository == 'SciTools/iris'"
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@v6
+ - uses: actions/stale@v7
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 22746cb0ee..7c95eeaca3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -13,7 +13,7 @@ minimum_pre_commit_version: 1.21.0
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.3.0
+ rev: v4.4.0
hooks:
# Prevent giant files from being committed.
- id: check-added-large-files
@@ -29,32 +29,31 @@ repos:
- id: no-commit-to-branch
- repo: https://github.com/psf/black
- rev: 22.8.0
+ rev: 23.1.0
hooks:
- id: black
pass_filenames: false
args: [--config=./pyproject.toml, .]
- repo: https://github.com/PyCQA/flake8
- rev: 5.0.4
+ rev: 6.0.0
hooks:
- id: flake8
types: [file, python]
args: [--config=./setup.cfg]
- repo: https://github.com/pycqa/isort
- rev: 5.10.1
+ rev: 5.12.0
hooks:
- id: isort
types: [file, python]
args: [--filter-files]
- repo: https://github.com/asottile/blacken-docs
- rev: v1.12.1
+ rev: 1.13.0
hooks:
- id: blacken-docs
types: [file, rst]
- additional_dependencies: [black==21.6b0]
- repo: https://github.com/aio-libs/sort-all
rev: v1.2.0
diff --git a/README.md b/README.md
index ac2781f469..cdf4b2b043 100644
--- a/README.md
+++ b/README.md
@@ -54,3 +54,24 @@ For documentation see the
developer version or the most recent released
stable version.
+
+## [#ShowYourStripes](https://showyourstripes.info/s/globe)
+
+
+
+
+
+
+**Graphics and Lead Scientist**: [Ed Hawkins](http://www.met.reading.ac.uk/~ed/home/index.php), National Centre for Atmospheric Science, University of Reading.
+
+**Data**: Berkeley Earth, NOAA, UK Met Office, MeteoSwiss, DWD, SMHI, UoR, Meteo France & ZAMG.
+
+
+#ShowYourStripes is distributed under a
+Creative Commons Attribution 4.0 International License
+
+
+
+
diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py
index 2f9bb04e35..2e40c525a6 100644
--- a/benchmarks/benchmarks/experimental/ugrid/__init__.py
+++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py
@@ -50,7 +50,7 @@ def time_create(self, *params):
class Connectivity(UGridCommon):
def setup(self, n_faces):
- self.array = np.zeros([n_faces, 3], dtype=np.int)
+ self.array = np.zeros([n_faces, 3], dtype=int)
super().setup(n_faces)
def create(self):
diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py
index 78b971d9de..52a5aceca8 100644
--- a/benchmarks/benchmarks/generate_data/__init__.py
+++ b/benchmarks/benchmarks/generate_data/__init__.py
@@ -113,7 +113,7 @@ def load_realised():
file loading, but some benchmarks are only meaningful if starting with real
arrays.
"""
- from iris.fileformats.netcdf import _get_cf_var_data as pre_patched
+ from iris.fileformats.netcdf.loader import _get_cf_var_data as pre_patched
def patched(cf_var, filename):
return as_concrete_data(pre_patched(cf_var, filename))
diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py
index ad54c23122..fc32ac289b 100644
--- a/benchmarks/benchmarks/import_iris.py
+++ b/benchmarks/benchmarks/import_iris.py
@@ -5,10 +5,30 @@
# licensing details.
from importlib import import_module, reload
+################
+# Prepare info for reset_colormaps:
+
+# Import and capture colormaps.
+from matplotlib import colormaps # isort:skip
+
+_COLORMAPS_ORIG = set(colormaps)
+
+# Import iris.palette, which modifies colormaps.
+import iris.palette
+
+# Derive which colormaps have been added by iris.palette.
+_COLORMAPS_MOD = set(colormaps)
+COLORMAPS_EXTRA = _COLORMAPS_MOD - _COLORMAPS_ORIG
+
+# Touch iris.palette to prevent linters complaining.
+_ = iris.palette
+
+################
+
class Iris:
@staticmethod
- def _import(module_name):
+ def _import(module_name, reset_colormaps=False):
"""
Have experimented with adding sleep() commands into the imported
modules. The results reveal:
@@ -25,6 +45,13 @@ def _import(module_name):
and the repetitions are therefore no faster than the first run.
"""
mod = import_module(module_name)
+
+ if reset_colormaps:
+ # Needed because reload() will attempt to register new colormaps a
+ # second time, which errors by default.
+ for cm_name in COLORMAPS_EXTRA:
+ colormaps.unregister(cm_name)
+
reload(mod)
def time_iris(self):
@@ -205,7 +232,7 @@ def time_iterate(self):
self._import("iris.iterate")
def time_palette(self):
- self._import("iris.palette")
+ self._import("iris.palette", reset_colormaps=True)
def time_plot(self):
self._import("iris.plot")
diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py
index c315119c11..44bd1b6c95 100644
--- a/benchmarks/benchmarks/regridding.py
+++ b/benchmarks/benchmarks/regridding.py
@@ -12,8 +12,11 @@
# importing anything else
from iris import tests # isort:skip
+import numpy as np
+
import iris
-from iris.analysis import AreaWeighted
+from iris.analysis import AreaWeighted, PointInCell
+from iris.coords import AuxCoord
class HorizontalChunkedRegridding:
@@ -53,3 +56,48 @@ def time_regrid_area_w_new_grid(self) -> None:
out = self.chunked_cube.regrid(self.template_cube, self.scheme_area_w)
# Realise data
out.data
+
+
+class CurvilinearRegridding:
+ def setup(self) -> None:
+ # Prepare a cube and a template
+
+ cube_file_path = tests.get_data_path(
+ ["NetCDF", "regrid", "regrid_xyt.nc"]
+ )
+ self.cube = iris.load_cube(cube_file_path)
+
+ # Make the source cube curvilinear
+ x_coord = self.cube.coord("longitude")
+ y_coord = self.cube.coord("latitude")
+ xx, yy = np.meshgrid(x_coord.points, y_coord.points)
+ self.cube.remove_coord(x_coord)
+ self.cube.remove_coord(y_coord)
+ x_coord_2d = AuxCoord(
+ xx,
+ standard_name=x_coord.standard_name,
+ units=x_coord.units,
+ coord_system=x_coord.coord_system,
+ )
+ y_coord_2d = AuxCoord(
+ yy,
+ standard_name=y_coord.standard_name,
+ units=y_coord.units,
+ coord_system=y_coord.coord_system,
+ )
+ self.cube.add_aux_coord(x_coord_2d, (1, 2))
+ self.cube.add_aux_coord(y_coord_2d, (1, 2))
+
+ template_file_path = tests.get_data_path(
+ ["NetCDF", "regrid", "regrid_template_global_latlon.nc"]
+ )
+ self.template_cube = iris.load_cube(template_file_path)
+
+ # Prepare a regridding scheme
+ self.scheme_pic = PointInCell()
+
+ def time_regrid_pic(self) -> None:
+ # Regrid the cube onto the template.
+ out = self.cube.regrid(self.template_cube, self.scheme_pic)
+ # Realise the data
+ out.data
diff --git a/docs/Makefile b/docs/Makefile
index f4c8d0b7f4..fcb0ec0116 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -20,11 +20,6 @@ html-quick:
echo "make html-quick in $$i..."; \
(cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-quick); done
-spelling:
- @for i in $(SUBDIRS); do \
- echo "make spelling in $$i..."; \
- (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) spelling); done
-
all:
@for i in $(SUBDIRS); do \
echo "make all in $$i..."; \
@@ -55,3 +50,8 @@ linkcheck:
echo "Running linkcheck in $$i..."; \
(cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) linkcheck); done
+show:
+ @for i in $(SUBDIRS); do \
+ echo "Running show in $$i..."; \
+ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) show); done
+
diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py
index 78401817ba..aad7906acd 100644
--- a/docs/gallery_code/general/plot_lineplot_with_legend.py
+++ b/docs/gallery_code/general/plot_lineplot_with_legend.py
@@ -24,7 +24,6 @@ def main():
)
for cube in temperature.slices("longitude"):
-
# Create a string label to identify this cube (i.e. latitude: value).
cube_label = "latitude: %s" % cube.coord("latitude").points[0]
diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py
index 75122591b9..2cf42e66e0 100644
--- a/docs/gallery_code/general/plot_projections_and_annotations.py
+++ b/docs/gallery_code/general/plot_projections_and_annotations.py
@@ -26,7 +26,6 @@
def make_plot(projection_name, projection_crs):
-
# Create a matplotlib Figure.
plt.figure()
diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py
index 08a9578e63..195f8b4bb0 100644
--- a/docs/gallery_code/general/plot_zonal_means.py
+++ b/docs/gallery_code/general/plot_zonal_means.py
@@ -16,7 +16,6 @@
def main():
-
# Loads air_temp.pp and "collapses" longitude into a single, average value.
fname = iris.sample_data_path("air_temp.pp")
temperature = iris.load_cube(fname)
diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py
index 5cd2752f39..e15aa0e6ef 100644
--- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py
+++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py
@@ -86,7 +86,6 @@ def main():
# Iterate over all possible latitude longitude slices.
for cube in last_timestep.slices(["latitude", "longitude"]):
-
# Get the ensemble member number from the ensemble coordinate.
ens_member = cube.coord("realization").points[0]
diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py
index dc038ecffe..6604b61ec3 100644
--- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py
+++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py
@@ -34,7 +34,7 @@ def main():
# the southern portion of the domain, and limit the depth of the profile
# to 1000m.
lon_cons = iris.Constraint(longitude=330.5)
- lat_cons = iris.Constraint(latitude=lambda l: -10 < l < -9)
+ lat_cons = iris.Constraint(latitude=lambda lat: -10 < lat < -9)
depth_cons = iris.Constraint(depth=lambda d: d <= 1000)
theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons)
salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons)
diff --git a/docs/src/Makefile b/docs/src/Makefile
index c693a2c900..a75da5371b 100644
--- a/docs/src/Makefile
+++ b/docs/src/Makefile
@@ -16,7 +16,7 @@ PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-.PHONY: help clean html html-noplot dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+.PHONY: help clean html html-noplot dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest show
help:
@echo "Please use \`make ' where is one of"
@@ -36,6 +36,7 @@ help:
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " show to open the built documentation in the default browser"
clean:
-rm -rf $(BUILDDIR)
@@ -61,11 +62,6 @@ html-quick:
@echo
@echo "Build finished. The HTML (no gallery or api docs) pages are in $(BUILDDIR)/html"
-spelling:
- $(SPHINXBUILD) -b spelling $(SRCDIR) $(BUILDDIR)
- @echo
- @echo "Build finished. The HTML (no gallery) pages are in $(BUILDDIR)/html"
-
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@@ -153,3 +149,7 @@ doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
+
+show:
+ @python -c "import webbrowser; webbrowser.open_new_tab('file://$(shell pwd)/$(BUILDDIR)/html/index.html')"
+
diff --git a/docs/src/_templates/layout.html b/docs/src/_templates/layout.html
index 7377e866b7..974bd12753 100644
--- a/docs/src/_templates/layout.html
+++ b/docs/src/_templates/layout.html
@@ -10,9 +10,8 @@
{% if on_rtd and rtd_version == 'latest' %}
You are viewing the
latest unreleased documentation
-
v{{ version }}. You may prefer a
-
stable
- version.
+
v{{ version }}. You can switch to a
stable version
+ via the flyout menu in the bottom corner of the screen.
{%- endif %}
diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc
index ec7e1efd6d..17278460dd 100644
--- a/docs/src/common_links.inc
+++ b/docs/src/common_links.inc
@@ -9,7 +9,7 @@
.. _conda: https://docs.conda.io/en/latest/
.. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
.. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
-.. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account
+.. _generating ssh keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account
.. _GitHub Actions: https://docs.github.com/en/actions
.. _GitHub Help Documentation: https://docs.github.com/en/github
.. _GitHub Discussions: https://github.com/SciTools/iris/discussions
@@ -21,7 +21,7 @@
.. _isort: https://pycqa.github.io/isort/
.. _issue: https://github.com/SciTools/iris/issues
.. _issues: https://github.com/SciTools/iris/issues
-.. _legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/
+.. _legacy documentation: https://github.com/SciTools/scitools.org.uk/tree/master/iris/docs/archive
.. _matplotlib: https://matplotlib.org/stable/
.. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html
.. _nox: https://nox.thea.codes/en/stable/
@@ -39,22 +39,26 @@
.. _requirements/ci/: https://github.com/SciTools/iris/tree/main/requirements/ci
.. _CF-UGRID: https://ugrid-conventions.github.io/ugrid-conventions/
.. _issues on GitHub: https://github.com/SciTools/iris/issues?q=is%3Aopen+is%3Aissue+sort%3Areactions-%2B1-desc
+.. _python-stratify: https://github.com/SciTools/python-stratify
+.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid
.. comment
- Core developers (@github names) in alphabetical order:
+ Core developers and prolific contributors (@github names) in alphabetical order:
.. _@abooton: https://github.com/abooton
.. _@alastair-gemmell: https://github.com/alastair-gemmell
.. _@ajdawson: https://github.com/ajdawson
.. _@bjlittle: https://github.com/bjlittle
.. _@bouweandela: https://github.com/bouweandela
+.. _@bsherratt: https://github.com/bsherratt
.. _@corinnebosley: https://github.com/corinnebosley
.. _@cpelley: https://github.com/cpelley
.. _@djkirkham: https://github.com/djkirkham
.. _@DPeterK: https://github.com/DPeterK
.. _@ESadek-MO: https://github.com/ESadek-MO
.. _@esc24: https://github.com/esc24
+.. _@HGWright: https://github.com/HGWright
.. _@jamesp: https://github.com/jamesp
.. _@jonseddon: https://github.com/jonseddon
.. _@jvegasbsc: https://github.com/jvegasbsc
diff --git a/docs/src/community/index.rst b/docs/src/community/index.rst
new file mode 100644
index 0000000000..114cb96fe9
--- /dev/null
+++ b/docs/src/community/index.rst
@@ -0,0 +1,58 @@
+.. include:: ../common_links.inc
+
+.. todo:
+ consider scientific-python.org
+ consider scientific-python.org/specs/
+
+Iris in the Community
+=====================
+
+Iris aims to be a valuable member of the open source scientific Python
+community.
+
+We listen out for developments in our dependencies and neighbouring projects,
+and we reach out to them when we can solve problems together; please feel free
+to reach out to us!
+
+We are aware of our place in the user's wider 'toolbox' - offering unique
+functionality and interoperating smoothly with other packages.
+
+We welcome contributions from all; whether that's an opinion, a 1-line
+clarification, or a whole new feature 🙂
+
+Quick Links
+-----------
+
+* `GitHub Discussions`_
+* :ref:`Getting involved`
+* `Twitter `_
+
+Interoperability
+----------------
+
+There's a big choice of Python tools out there! Each one has strengths and
+weaknesses in different areas, so we don't want to force a single choice for your
+whole workflow - we'd much rather make it easy for you to choose the right tool
+for the moment, switching whenever you need. Below are our ongoing efforts at
+smoother interoperability:
+
+.. not using toctree due to combination of child pages and cross-references.
+
+* The :mod:`iris.pandas` module
+* :doc:`iris_xarray`
+
+.. toctree::
+ :maxdepth: 1
+ :hidden:
+
+ iris_xarray
+
+Plugins
+-------
+
+Iris can be extended with **plugins**! See below for further information:
+
+.. toctree::
+ :maxdepth: 2
+
+ plugins
diff --git a/docs/src/community/iris_xarray.rst b/docs/src/community/iris_xarray.rst
new file mode 100644
index 0000000000..859597da78
--- /dev/null
+++ b/docs/src/community/iris_xarray.rst
@@ -0,0 +1,154 @@
+.. include:: ../common_links.inc
+
+======================
+Iris ❤️ :term:`Xarray`
+======================
+
+There is a lot of overlap between Iris and :term:`Xarray`, but some important
+differences too. Below is a summary of the most important differences, so that
+you can be prepared, and to help you choose the best package for your use case.
+
+Overall Experience
+------------------
+
+Iris is the more specialised package, focussed on making it as easy
+as possible to work with meteorological and climatological data. Iris
+is built to natively handle many key concepts, such as the CF conventions,
+coordinate systems and bounded coordinates. Iris offers a smaller toolkit of
+operations compared to Xarray, particularly around API for sophisticated
+computation such as array manipulation and multi-processing.
+
+Xarray's more generic data model and community-driven development give it a
+richer range of operations and broader possible uses. Using Xarray
+specifically for meteorology/climatology may require deeper knowledge
+compared to using Iris, and you may prefer to add Xarray plugins
+such as :ref:`cfxarray` to get the best experience. Advanced users can likely
+achieve better performance with Xarray than with Iris.
+
+Conversion
+----------
+There are multiple ways to convert between Iris and Xarray objects.
+
+* Xarray includes the :meth:`~xarray.DataArray.to_iris` and
+ :meth:`~xarray.DataArray.from_iris` methods - detailed in the
+ `Xarray IO notes on Iris`_. Since Iris evolves independently of Xarray, be
+ vigilant for concepts that may be lost during the conversion.
+* Because both packages are closely linked to the :term:`NetCDF Format`, it is
+ feasible to save a NetCDF file using one package then load that file using
+ the other package. This will be lossy in places, as both Iris and Xarray
+ are opinionated on how certain NetCDF concepts relate to their data models.
+* The Iris development team are exploring an improved 'bridge' between the two
+ packages. Follow the conversation on GitHub: `iris#4994`_. This project is
+ expressly intended to be as lossless as possible.
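For illustration, a conversion round trip using the Xarray methods mentioned in the first bullet might look like the sketch below (the input file name is hypothetical, and some metadata may not survive the trip):

.. code-block:: python

    import iris
    import xarray as xr

    # Hypothetical input file - any Iris-loadable NetCDF would do.
    cube = iris.load_cube("air_temperature.nc")

    # Cube -> DataArray, then back to a Cube again.
    data_array = xr.DataArray.from_iris(cube)
    round_tripped = data_array.to_iris()

    # Inspect what survived; concepts such as cell methods or coordinate
    # systems may be lost or altered along the way.
    print(round_tripped)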
+
+Regridding
+----------
+Iris and Xarray offer a range of regridding methods - both natively and via
+additional packages such as `iris-esmf-regrid`_ and `xESMF`_ - which overlap
+in places
+but tend to cover a different set of use cases (e.g. Iris handles unstructured
+meshes but offers access to fewer ESMF methods). The behaviour of these
+regridders also differs slightly (even between different regridders attached to
+the same package) so the appropriate package to use depends highly on the
+particulars of the use case.
+
+Plotting
+--------
+Xarray and Iris have a large overlap of functionality when creating
+:term:`Matplotlib` plots and both support the plotting of multidimensional
+coordinates. This means the experience is largely similar using either package.
+
+Xarray supports further plotting backends through external packages (e.g. Bokeh through `hvPlot`_)
+and, for users already familiar with `pandas`_, its plotting interface will feel
+familiar. It also supports some plot types that Iris does not, so it can be used
+for a wider variety of plots, and it offers convenient "out of the box"
+customisation. However, further customisation still requires knowledge of
+matplotlib.
+
+In both cases, :term:`Cartopy` is/can be used. Iris does more work
+automatically for the user here, creating Cartopy
+:class:`~cartopy.mpl.geoaxes.GeoAxes` for latitude and longitude coordinates,
+whereas the user has to do this manually in Xarray.
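As a rough sketch of that difference (assuming the separate iris-sample-data package is installed; not a definitive recipe for either package):

.. code-block:: python

    import cartopy.crs as ccrs
    import matplotlib.pyplot as plt
    import xarray as xr

    import iris
    import iris.quickplot as qplt

    cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))
    data_array = xr.DataArray.from_iris(cube)

    # Iris: a Cartopy GeoAxes is created automatically for lat/lon data.
    qplt.pcolormesh(cube)
    plt.gca().coastlines()
    plt.show()

    # Xarray: the GeoAxes is created explicitly by the user.
    ax = plt.axes(projection=ccrs.PlateCarree())
    data_array.plot.pcolormesh(ax=ax, transform=ccrs.PlateCarree())
    ax.coastlines()
    plt.show()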
+
+Statistics
+----------
+Both libraries are quite comparable with generally similar capabilities,
+performance and laziness. Iris offers more specialised behaviour in some cases,
+such as a number of unique statistical functions and masked-data tolerance in
+most statistics. Xarray tends to be more approachable, with less specialised but
+more convenient solutions (these tend to be wrappers to :term:`Dask` functions).
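For example, a mean over time looks broadly like this in each package (a sketch only; ``cube`` and ``data_array`` are assumed to be equivalent objects sharing a ``time`` dimension):

.. code-block:: python

    import iris.analysis

    # Iris: aggregation uses explicit aggregator objects.
    time_mean_cube = cube.collapsed("time", iris.analysis.MEAN)

    # Xarray: a thin, convenient wrapper around the NumPy/Dask reduction.
    time_mean_da = data_array.mean(dim="time", skipna=True)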
+
+Laziness and Multi-Processing with :term:`Dask`
+-----------------------------------------------
+Iris and Xarray both support lazy data and out-of-core processing through
+utilisation of Dask.
+
+While both Iris and Xarray expose :term:`NumPy` conveniences at the API level
+(e.g. the `ndim` property), only Xarray exposes Dask conveniences. For example
+:attr:`xarray.DataArray.chunks`, which gives the user direct control
+over the underlying Dask array chunks. The Iris API instead takes control of
+such concepts and user control is only possible by manipulating the underlying
+Dask array directly (accessed via :meth:`iris.cube.Cube.core_data`).
+
+:class:`xarray.DataArray`\ s comply with `NEP-18`_, allowing NumPy arrays to be
+based on them, and they also include the necessary extra members for Dask
+arrays to be based on them too. Neither of these is currently possible with
+Iris :class:`~iris.cube.Cube`\ s, although this is an ambition for the future.
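A sketch of where Dask is exposed in each API (``cube`` and ``data_array`` are assumed to hold lazy, chunked data):

.. code-block:: python

    # Xarray: chunks are part of the public API.
    print(data_array.chunks)
    rechunked = data_array.chunk({"time": 10})

    # Iris: the same information lives on the underlying Dask array,
    # reached via core_data() while the cube is still lazy.
    lazy_array = cube.core_data()
    print(cube.has_lazy_data(), lazy_array.chunks)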
+
+NetCDF File Control
+-------------------
+(More info: :term:`NetCDF Format`)
+
+Unlike Iris, Xarray generally provides full control of major file structures,
+i.e. dimensions + variables, including their order in the file. It mostly
+respects these in a file input, and can reproduce them on output.
+However, attribute handling is not so complete: like Iris, it interprets and
+modifies some recognised aspects, and can add some extra attributes not in the
+input.
+
+.. todo:
+ More detail on dates and fill values (@pp-mo suggestion).
+
+Handling of dates and fill values has some special problems here.
+
+Ultimately, nearly any desired result file can be achieved in Xarray, via its
+override mechanisms (`loading keywords`_
+and the '`encoding`_' dictionaries).
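For example, Xarray's save-time ``encoding`` dictionaries allow per-variable control of details such as the on-disk dtype and fill value (a hedged sketch; the variable name and file paths are invented):

.. code-block:: python

    import xarray as xr

    dataset = xr.open_dataset("input.nc")
    dataset.to_netcdf(
        "output.nc",
        encoding={
            "air_temperature": {
                "dtype": "int16",
                "scale_factor": 0.01,
                "_FillValue": -9999,
            }
        },
    )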
+
+Missing Data
+------------
+Xarray uses :data:`numpy.nan` to represent missing values and this will support
+many simple use cases assuming the data are floats. Iris enables more
+sophisticated missing data handling by representing missing values as masks
+(:class:`numpy.ma.MaskedArray` for real data and :class:`dask.array.Array`
+for lazy data) which allows data to be any data type and to include either/both
+a mask and :data:`~numpy.nan`\ s.
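A small sketch of the practical difference between the two representations:

.. code-block:: python

    import numpy as np

    # NaN-based missing data (the Xarray default) requires a float dtype.
    nan_based = np.array([1.0, np.nan, 3.0])
    print(np.nanmean(nan_based))  # 2.0

    # A masked array (the Iris approach) keeps the original dtype and
    # carries the mask separately from the data values.
    masked_based = np.ma.masked_array([1, 2, 3], mask=[False, True, False])
    print(masked_based.mean())  # 2.0, with the underlying dtype still int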
+
+.. _cfxarray:
+
+`cf-xarray`_
+-------------
+Iris has a data model entirely based on :term:`CF Conventions`. Xarray has a
+data model based on :term:`NetCDF Format` with cf-xarray acting as translation
+into CF. Xarray/cf-xarray methods can be
+called and data accessed with CF-like arguments (e.g. axis, standard name) and
+there are some CF specific utilities (similar
+to Iris utilities). Iris tends to cover more of and be stricter about CF.
+
+
+.. seealso::
+
+ * `Xarray IO notes on Iris`_
+ * `Xarray notes on other NetCDF libraries`_
+
+.. _Xarray IO notes on Iris: https://docs.xarray.dev/en/stable/user-guide/io.html#iris
+.. _Xarray notes on other NetCDF libraries: https://docs.xarray.dev/en/stable/getting-started-guide/faq.html#what-other-netcdf-related-python-libraries-should-i-know-about
+.. _loading keywords: https://docs.xarray.dev/en/stable/generated/xarray.open_dataset.html#xarray.open_dataset
+.. _encoding: https://docs.xarray.dev/en/stable/user-guide/io.html#writing-encoded-data
+.. _xESMF: https://github.com/pangeo-data/xESMF/
+.. _seaborn: https://seaborn.pydata.org/
+.. _hvPlot: https://hvplot.holoviz.org/
+.. _pandas: https://pandas.pydata.org/
+.. _NEP-18: https://numpy.org/neps/nep-0018-array-function-protocol.html
+.. _cf-xarray: https://github.com/xarray-contrib/cf-xarray
+.. _iris#4994: https://github.com/SciTools/iris/issues/4994
diff --git a/docs/src/community/plugins.rst b/docs/src/community/plugins.rst
new file mode 100644
index 0000000000..0d79d64623
--- /dev/null
+++ b/docs/src/community/plugins.rst
@@ -0,0 +1,68 @@
+.. _namespace package: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/
+
+.. _community_plugins:
+
+Plugins
+=======
+
+Iris supports **plugins** under the ``iris.plugins`` `namespace package`_.
+This allows packages that extend Iris' functionality to be developed and
+maintained independently, while still being installed into ``iris.plugins``
+instead of a separate package. For example, a plugin may provide loaders or
+savers for additional file formats, or alternative visualisation methods.
+
+
+Using plugins
+-------------
+
+Once a plugin is installed, it can be used either via the
+:func:`iris.use_plugin` function, or by importing it directly:
+
+.. code-block:: python
+
+ import iris
+
+ iris.use_plugin("my_plugin")
+ # OR
+ import iris.plugins.my_plugin
+
+
+Creating plugins
+----------------
+
+The choice of a `namespace package`_ makes writing a plugin relatively
+straightforward: it simply needs to appear as a folder within ``iris/plugins``,
+then can be distributed in the same way as any other package. An example
+repository layout:
+
+.. code-block:: text
+
+ + lib
+ + iris
+ + plugins
+ + my_plugin
+ - __init__.py
+ - (more code...)
+ - README.md
+ - pyproject.toml
+ - setup.cfg
+ - (other project files...)
+
+In particular, note that there must **not** be any ``__init__.py`` files at
+higher levels than the plugin itself.
+
+The package name - how it is referred to by PyPI/conda, specified by
+``metadata.name`` in ``setup.cfg`` - is recommended to include both "iris" and
+the plugin name. Continuing this example, its ``setup.cfg`` should include, at
+minimum:
+
+.. code-block:: ini
+
+ [metadata]
+ name = iris-my-plugin
+
+ [options]
+ packages = find_namespace:
+
+ [options.packages.find]
+ where = lib
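A minimal sketch of what the plugin package itself might contain is shown below; ``my_plugin`` and its ``describe`` function are entirely hypothetical, the only requirement being the namespace location described above:

.. code-block:: python

    # lib/iris/plugins/my_plugin/__init__.py
    """A hypothetical Iris plugin adding one small utility."""

    import iris.cube


    def describe(cube: iris.cube.Cube) -> str:
        """Return a one-line summary of a cube - purely illustrative."""
        return f"{cube.name()} with shape {cube.shape}"

Once installed, this would be reachable via either route shown earlier, e.g. ``iris.use_plugin("my_plugin")`` followed by ``iris.plugins.my_plugin.describe(some_cube)``.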
diff --git a/docs/src/conf.py b/docs/src/conf.py
index 33864c4658..576a099b90 100644
--- a/docs/src/conf.py
+++ b/docs/src/conf.py
@@ -158,8 +158,6 @@ def _dotv(version):
"sphinx_copybutton",
"sphinx.ext.napoleon",
"sphinx_panels",
- # TODO: Spelling extension disabled until the dependencies can be included
- # "sphinxcontrib.spelling",
"sphinx_gallery.gen_gallery",
"matplotlib.sphinxext.mathmpl",
"matplotlib.sphinxext.plot_directive",
@@ -193,16 +191,6 @@ def _dotv(version):
napoleon_use_keyword = True
napoleon_custom_sections = None
-# -- spellingextension --------------------------------------------------------
-# See https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html
-spelling_lang = "en_GB"
-# The lines in this file must only use line feeds (no carriage returns).
-spelling_word_list_filename = ["spelling_allow.txt"]
-spelling_show_suggestions = False
-spelling_show_whole_line = False
-spelling_ignore_importable_modules = True
-spelling_ignore_python_builtins = True
-
# -- copybutton extension -----------------------------------------------------
# See https://sphinx-copybutton.readthedocs.io/en/latest/
copybutton_prompt_text = r">>> |\.\.\. "
@@ -235,6 +223,7 @@ def _dotv(version):
"python": ("https://docs.python.org/3/", None),
"scipy": ("https://docs.scipy.org/doc/scipy/", None),
"pandas": ("https://pandas.pydata.org/docs/", None),
+ "dask": ("https://docs.dask.org/en/stable/", None),
}
# The name of the Pygments (syntax highlighting) style to use.
@@ -310,6 +299,9 @@ def _dotv(version):
],
"use_edit_page_button": True,
"show_toc_level": 1,
+ # Omitted `theme-switcher` below to disable it
+ # Info: https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/light-dark.html#configure-default-theme-mode
+ "navbar_end": ["navbar-icon-links"],
}
rev_parse = run(["git", "rev-parse", "--short", "HEAD"], capture_output=True)
@@ -321,6 +313,9 @@ def _dotv(version):
"github_user": "scitools",
"github_version": "main",
"doc_path": "docs/src",
+ # default theme. Also disabled the button in the html_theme_options.
+ # Info: https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/light-dark.html#configure-default-theme-mode
+ "default_mode": "light",
# custom
"on_rtd": on_rtd,
"rtd_version": rtd_version,
diff --git a/docs/src/developers_guide/contributing_documentation_full.rst b/docs/src/developers_guide/contributing_documentation_full.rst
index ac62a67373..a470def683 100755
--- a/docs/src/developers_guide/contributing_documentation_full.rst
+++ b/docs/src/developers_guide/contributing_documentation_full.rst
@@ -61,7 +61,10 @@ If you wish to run a full clean build you can run::
make clean
make html
-This is useful for a final test before committing your changes.
+This is useful for a final test before committing your changes. Having built
+the documentation, you can view it in your default browser via::
+
+ make show
.. note:: In order to preserve a clean build for the html, all **warnings**
have been promoted to be **errors** to ensure they are addressed.
@@ -84,6 +87,8 @@ pattern matching, e.g.::
pytest -v -k plot_coriolis docs/gallery_tests/test_gallery_examples.py
+If a gallery test fails, follow the instructions in :ref:`testing.graphics`.
+
The ``make`` commands shown below can be run in the ``docs`` or ``docs/src``
directory.
@@ -108,18 +113,6 @@ adding it to the ``linkcheck_ignore`` array that is defined in the
If this fails check the output for the text **broken** and then correct
or ignore the url.
-.. comment
- Finally, the spelling in the documentation can be checked automatically via the
- command::
-
- make spelling
-
- The spelling check may pull up many technical abbreviations and acronyms. This
- can be managed by using an **allow** list in the form of a file. This file,
- or list of files is set in the `conf.py`_ using the string list
- ``spelling_word_list_filename``.
-
-
.. note:: In addition to the automated `Iris GitHub Actions`_ build of all the
documentation build options above, the
https://readthedocs.org/ service is also used. The configuration
@@ -162,13 +155,13 @@ The code for the gallery entries are in ``docs/gallery_code``.
Each sub directory in this directory is a sub section of the gallery. The
respective ``README.rst`` in each folder is included in the gallery output.
-For each gallery entry there must be a corresponding test script located in
-``docs/gallery_tests``.
-
To add an entry to the gallery simply place your python code into the
appropriate sub directory and name it with a prefix of ``plot_``. If your
gallery entry does not fit into any existing sub directories then create a new
-directory and place it in there.
+directory and place it in there. A test for the gallery entry will be
+automatically generated (see Testing_ for how to run it). To add a new
+reference image for this test, follow the instructions in
+:ref:`testing.graphics`.
The reStructuredText (rst) output of the gallery is located in
``docs/src/generated/gallery``.
diff --git a/docs/src/developers_guide/gitwash/forking.rst b/docs/src/developers_guide/gitwash/forking.rst
index 247e3cf678..baeb243c86 100644
--- a/docs/src/developers_guide/gitwash/forking.rst
+++ b/docs/src/developers_guide/gitwash/forking.rst
@@ -18,7 +18,7 @@ Set up and Configure a Github Account
If you don't have a github account, go to the github page, and make one.
You then need to configure your account to allow write access, see
-the `generating sss keys for GitHub`_ help on `github help`_.
+the `generating ssh keys for GitHub`_ help on `github help`_.
Create Your own Forked Copy of Iris
diff --git a/docs/src/developers_guide/gitwash/set_up_fork.rst b/docs/src/developers_guide/gitwash/set_up_fork.rst
index d5c5bc5c44..5318825488 100644
--- a/docs/src/developers_guide/gitwash/set_up_fork.rst
+++ b/docs/src/developers_guide/gitwash/set_up_fork.rst
@@ -15,7 +15,7 @@ Overview
git clone git@github.com:your-user-name/iris.git
cd iris
- git remote add upstream git://github.com/SciTools/iris.git
+ git remote add upstream git@github.com:SciTools/iris.git
In Detail
=========
diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst
index de7aa6c719..bae77a7d21 100644
--- a/docs/src/developers_guide/release.rst
+++ b/docs/src/developers_guide/release.rst
@@ -277,6 +277,11 @@ Post Release Steps
#. On main, make a new ``latest.rst`` from ``latest.rst.template`` and update
the include statement and the toctree in ``index.rst`` to point at the new
``latest.rst``.
+#. Consider updating ``docs/src/userguide/citation.rst`` on ``main`` to include
+ the version number, date and `Zenodo DOI `_
+ of the new release. Ideally this would be updated before the release, but
+ the DOI for the new version is only available once the release has been
+ created in GitHub.
.. _SciTools/iris: https://github.com/SciTools/iris
diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst
index de1afb15af..4c55047d4c 100644
--- a/docs/src/further_topics/metadata.rst
+++ b/docs/src/further_topics/metadata.rst
@@ -389,10 +389,10 @@ instances. Normally, this would cause issues. For example,
.. doctest:: richer-metadata
- >>> simply = {"one": np.int(1), "two": np.array([1.0, 2.0])}
+ >>> simply = {"one": np.int32(1), "two": np.array([1.0, 2.0])}
>>> simply
{'one': 1, 'two': array([1., 2.])}
- >>> fruity = {"one": np.int(1), "two": np.array([1.0, 2.0])}
+ >>> fruity = {"one": np.int32(1), "two": np.array([1.0, 2.0])}
>>> fruity
{'one': 1, 'two': array([1., 2.])}
>>> simply == fruity
@@ -419,7 +419,7 @@ However, metadata class equality is rich enough to handle this eventuality,
>>> metadata1
CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'one': 1, 'two': array([1., 2.])}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),))
- >>> metadata2 = cube.metadata._replace(attributes={"one": np.int(1), "two": np.array([1000.0, 2000.0])})
+ >>> metadata2 = cube.metadata._replace(attributes={"one": np.int32(1), "two": np.array([1000.0, 2000.0])})
>>> metadata2
CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'one': 1, 'two': array([1000., 2000.])}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),))
>>> metadata1 == metadata2
diff --git a/docs/src/further_topics/ugrid/images/orca_grid.png b/docs/src/further_topics/ugrid/images/orca_grid.png
new file mode 100644
index 0000000000..6676e84fbb
Binary files /dev/null and b/docs/src/further_topics/ugrid/images/orca_grid.png differ
diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/further_topics/ugrid/other_meshes.rst
index e6f477624e..38abeeca03 100644
--- a/docs/src/further_topics/ugrid/other_meshes.rst
+++ b/docs/src/further_topics/ugrid/other_meshes.rst
@@ -221,5 +221,140 @@ as the **nodes** when creating the Iris
+
+.. _ORCA_example:
+
+`NEMO`_ data on ORCA tripolar grid
+----------------------------------
+.. figure:: images/orca_grid.png
+ :width: 300
+ :alt: Plot of ORCA-gridded data from NEMO.
+
+NEMO can use various grids, but is frequently used with ORCA type grids.
+ORCA grids store global data in 2-dimensional ny * nx arrays. All cells are
+four-sided. The grids are based on tri-polar layouts, but X and Y spacings are
+irregular and not given by any defined functional forms.
+
+* arrays (ny, nx) of face-located data variables
+* arrays (ny, nx) of X+Y face centre coordinates
+* arrays (ny, nx, 4) of X+Y face corner coordinates
+ (all faces are quadrilaterals)
+
+For simplicity, we treat each face corner as an independent node, and use a face-node
+connectivity which simply lists the nodes in sequence,
+i.e. [[0, 1, 2, 3], [4, 5, 6, 7], ...].
+
+.. Note::
+ This is the simplest solution, but produces approx 4x more nodes than
+ necessary, since the coordinate bounds contain many duplicate locations.
+ Removing the duplicates is quite easy, but often not necessary.
+
+To make an unstructured cube, the data must be 'flattened' to convert the given X and Y
+dimensions into a single mesh dimension. Since Iris cubes don't support "reshape" or
+"flatten" operations, we create a new cube from the flattened data.
+
+.. dropdown:: :opticon:`code`
+
+ .. code-block:: python
+
+ >>> import numpy as np
+ >>> import iris
+ >>> from iris.coords import AuxCoord, CellMeasure
+ >>> from iris.cube import Cube
+ >>> from iris.experimental.ugrid.mesh import Mesh, Connectivity
+
+
+ >>> filepath = iris.sample_data_path('orca2_votemper.nc')
+ >>> cube = iris.load_cube(filepath)
+ >>> print(cube)
+ sea_water_potential_temperature / (degC) (-- : 148; -- : 180)
+ Auxiliary coordinates:
+ latitude x x
+ longitude x x
+ Scalar coordinates:
+ depth 4.999938 m, bound=(0.0, 10.0) m
+ time 0001-01-01 12:00:00
+ Cell methods:
+ mean time
+ Attributes:
+ Conventions 'CF-1.5'
+
+
+ >>> co_x = cube.coord("longitude")
+ >>> co_y = cube.coord("latitude")
+ >>> ny, nx = co_x.shape
+ >>> n_faces = ny * nx
+
+ >>> # Create face coords from flattened face-points
+ >>> face_x_co = AuxCoord(co_x.points.flatten())
+ >>> face_y_co = AuxCoord(co_y.points.flatten())
+ >>> assert face_x_co.shape == (n_faces,)
+ >>> face_x_co.metadata = co_x.metadata
+ >>> face_y_co.metadata = co_y.metadata
+
+ >>> # Create node coordinates from bound points.
+ >>> n_nodes = n_faces * 4
+ >>> node_x_co = AuxCoord(co_x.bounds.flatten())
+ >>> node_y_co = AuxCoord(co_y.bounds.flatten())
+ >>> assert node_x_co.shape == (n_nodes,)
+ >>> node_x_co.metadata = co_x.metadata
+ >>> node_y_co.metadata = co_y.metadata
+
+ >>> # Create a face-node Connectivity matching the order of nodes in the bounds array
+ >>> face_node_inds = np.arange(n_nodes).reshape((n_faces, 4))
+ >>> face_nodes_conn = Connectivity(
+ ... indices=face_node_inds,
+ ... cf_role='face_node_connectivity',
+ ... long_name='face_inds', units='1',
+ ... )
+
+ >>> # Create a mesh object.
+ >>> mesh = Mesh(
+ ... topology_dimension=2,
+ ... node_coords_and_axes=[(node_x_co, 'x'), (node_y_co, 'y')],
+ ... connectivities=face_nodes_conn,
+ ... face_coords_and_axes=[(face_x_co, 'x'), (face_y_co, 'y')]
+ ... )
+ >>> print(mesh)
+ Mesh : 'unknown'
+ topology_dimension: 2
+ node
+ node_dimension: 'Mesh2d_node'
+ node coordinates
+
+
+ face
+ face_dimension: 'Mesh2d_face'
+ face_node_connectivity:
+ face coordinates
+
+
+
+
+ >>> # Create an unstructured version of the input with flattened data
+ >>> meshcube = Cube(cube.core_data().flatten())
+ >>> meshcube.metadata = cube.metadata
+
+ >>> # Attach the mesh by adding the mesh 'face' MeshCoords into the cube
+ >>> mesh_dim = meshcube.ndim - 1
+ >>> for co in mesh.to_MeshCoords('face'):
+ ... meshcube.add_aux_coord(co, mesh_dim)
+ ...
+
+ >>> print(meshcube)
+ sea_water_potential_temperature / (degC) (-- : 26640)
+ Mesh coordinates:
+ latitude x
+ longitude x
+ Mesh:
+ name unknown
+ location face
+ Cell methods:
+ mean time
+ Attributes:
+ Conventions 'CF-1.5'
+
+
.. _WAVEWATCH III: https://github.com/NOAA-EMC/WW3
.. _FESOM 1.4: https://fesom.de/models/fesom14/
+.. _NEMO: https://www.nemo-ocean.eu/
\ No newline at end of file
diff --git a/docs/src/further_topics/ugrid/partner_packages.rst b/docs/src/further_topics/ugrid/partner_packages.rst
index 8e36f4ffc2..75b54b037f 100644
--- a/docs/src/further_topics/ugrid/partner_packages.rst
+++ b/docs/src/further_topics/ugrid/partner_packages.rst
@@ -1,3 +1,5 @@
+.. include:: ../../common_links.inc
+
.. _ugrid partners:
Iris' Mesh Partner Packages
@@ -97,4 +99,3 @@ Applications
.. _GeoVista: https://github.com/bjlittle/geovista
.. _PyVista: https://docs.pyvista.org/index.html
-.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid
diff --git a/docs/src/index.rst b/docs/src/index.rst
index b9f7faaa03..531c0e0b26 100644
--- a/docs/src/index.rst
+++ b/docs/src/index.rst
@@ -88,6 +88,8 @@ Icons made by `FreePik `_ from
`Flaticon `_
+.. _iris_support:
+
Support
~~~~~~~
@@ -101,7 +103,11 @@ The legacy support resources:
* `Users Google Group `_
* `Developers Google Group `_
-* `Legacy Documentation`_ (Iris 2.4 or earlier)
+* `Legacy Documentation`_ (Iris 2.4 or earlier). This is an archive of zip
+ files of past documentation. You can download, unzip and view the
+documentation locally (index.html). There may be some incorrect rendering,
+and older javascript (.js) files may show a warning when uncompressing, in
+ which case we suggest you use a different unzip tool.
.. toctree::
@@ -130,6 +136,15 @@ The legacy support resources:
developers_guide/contributing_getting_involved
+.. toctree::
+ :caption: Community
+ :maxdepth: 1
+ :name: community_index
+ :hidden:
+
+ Community
+
+
.. toctree::
:caption: Iris API
:maxdepth: 1
diff --git a/docs/src/installing.rst b/docs/src/installing.rst
index 6a2d2f6131..b2481973c0 100644
--- a/docs/src/installing.rst
+++ b/docs/src/installing.rst
@@ -14,7 +14,7 @@ Subsystem for Linux). This is a great option to get started with Iris
for users and developers. Be aware that we do not currently test against
any WSL_ distributions.
-.. _WSL: https://docs.microsoft.com/en-us/windows/wsl/install-win10
+.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/install
.. note:: Iris is currently supported and tested against |python_support|
running on Linux. We do not currently actively test on other
diff --git a/docs/src/spelling_allow.txt b/docs/src/spelling_allow.txt
deleted file mode 100644
index ed883ac3bf..0000000000
--- a/docs/src/spelling_allow.txt
+++ /dev/null
@@ -1,361 +0,0 @@
-Admin
-Albers
-Arakawa
-Arg
-Args
-Autoscale
-Biggus
-CF
-CI
-Cartopy
-Checklist
-Color
-Conda
-Constraining
-DAP
-Dask
-Debian
-Duchon
-EO
-Eos
-Exner
-Fieldsfile
-Fieldsfiles
-FillValue
-Gb
-GeogCS
-Hovmoller
-Jul
-Jun
-Jupyter
-Lanczos
-Mappables
-Matplotlib
-Mb
-Modeling
-Mollweide
-NetCDF
-Nino
-PPfield
-PPfields
-Perez
-Proj
-Quickplot
-Regrids
-Royer
-Scitools
-Scitools
-Sep
-Stehfest
-Steroegraphic
-Subsetting
-TestCodeFormat
-TestLicenseHeaders
-Torvalds
-Trans
-Trenberth
-Tri
-URIs
-URLs
-Ubuntu
-Ugrid
-Unidata
-Vol
-Vuuren
-Workflow
-Yury
-Zaytsev
-Zorder
-abf
-abl
-advection
-aggregator
-aggregators
-alphap
-ancils
-antimeridian
-ap
-arg
-args
-arithmetic
-arraylike
-atol
-auditable
-aux
-basemap
-behaviour
-betap
-bhulev
-biggus
-blev
-boolean
-boundpoints
-branchname
-broadcastable
-bugfix
-bugfixes
-builtin
-bulev
-carrée
-cartesian
-celsius
-center
-centrepoints
-cf
-cftime
-chunksizes
-ci
-clabel
-cmap
-cmpt
-codebase
-color
-colorbar
-colorbars
-complevel
-conda
-config
-constraining
-convertor
-coord
-coords
-cs
-datafiles
-datatype
-datetime
-datetimes
-ddof
-deepcopy
-deprecations
-der
-dewpoint
-dict
-dicts
-diff
-discontiguities
-discontiguous
-djf
-docstring
-docstrings
-doi
-dom
-dropdown
-dtype
-dtypes
-dx
-dy
-edgecolor
-endian
-endianness
-equirectangular
-eta
-etc
-fh
-fieldsfile
-fieldsfiles
-fileformat
-fileformats
-filename
-filenames
-filepath
-filespec
-fullname
-func
-geolocations
-github
-gregorian
-grib
-gribapi
-gridcell
-griddata
-gridlines
-hPa
-hashable
-hindcast
-hyperlink
-hyperlinks
-idiff
-ieee
-ifunc
-imagehash
-inc
-init
-inline
-inplace
-int
-interable
-interpolator
-ints
-io
-isosurfaces
-iterable
-jja
-jupyter
-kwarg
-kwargs
-landsea
-lat
-latlon
-latlons
-lats
-lbcode
-lbegin
-lbext
-lbfc
-lbft
-lblrec
-lbmon
-lbmond
-lbnrec
-lbrsvd
-lbtim
-lbuser
-lbvc
-lbyr
-lbyrd
-lh
-lhs
-linewidth
-linted
-linting
-lon
-lons
-lt
-mam
-markup
-matplotlib
-matplotlibrc
-max
-mdtol
-meaned
-mercator
-metadata
-min
-mpl
-nanmask
-nc
-ndarray
-neighbor
-ness
-netCDF
-netcdf
-netcdftime
-nimrod
-np
-nsigma
-numpy
-nx
-ny
-online
-orog
-paramId
-params
-parsable
-pcolormesh
-pdf
-placeholders
-plugin
-png
-proj
-ps
-pseudocolor
-pseudocolour
-pseudocoloured
-py
-pyplot
-quickplot
-rST
-rc
-rd
-reST
-reStructuredText
-rebase
-rebases
-rebasing
-regrid
-regridded
-regridder
-regridders
-regridding
-regrids
-rel
-repo
-repos
-reprojecting
-rh
-rhs
-rst
-rtol
-scipy
-scitools
-seekable
-setup
-sines
-sinh
-spec
-specs
-src
-ssh
-st
-stashcode
-stashcodes
-stats
-std
-stdout
-str
-subcube
-subcubes
-submodule
-submodules
-subsetting
-sys
-tanh
-tb
-testcases
-tgt
-th
-timepoint
-timestamp
-timesteps
-todo
-tol
-tos
-traceback
-travis
-tripolar
-tuple
-tuples
-txt
-udunits
-ufunc
-ugrid
-ukmo
-un
-unhandled
-unicode
-unittest
-unrotate
-unrotated
-uris
-url
-urls
-util
-var
-versioning
-vmax
-vmin
-waypoint
-waypoints
-whitespace
-wildcard
-wildcards
-windspeeds
-withnans
-workflow
-workflows
-xN
-xx
-xxx
-zeroth
-zlev
-zonal
diff --git a/docs/src/userguide/citation.rst b/docs/src/userguide/citation.rst
index 0a3a85fb89..1498b9dfe1 100644
--- a/docs/src/userguide/citation.rst
+++ b/docs/src/userguide/citation.rst
@@ -15,11 +15,12 @@ For example::
@manual{Iris,
author = {{Met Office}},
- title = {Iris: A Python package for analysing and visualising meteorological and oceanographic data sets},
- edition = {v1.2},
- year = {2010 - 2013},
+ title = {Iris: A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data },
+ edition = {v3.4},
+ year = {2010 - 2022},
address = {Exeter, Devon },
- url = {http://scitools.org.uk/}
+ url = {http://scitools.org.uk/},
+ doi = {10.5281/zenodo.7386117}
}
@@ -33,7 +34,7 @@ Suggested format::
For example::
- Iris. v1.2. 28-Feb-2013. Met Office. UK. https://github.com/SciTools/iris/archive/v1.2.0.tar.gz 01-03-2013
+ Iris. v3.4. 1-Dec-2022. Met Office. UK. https://doi.org/10.5281/zenodo.7386117 22-12-2022
********************
@@ -46,7 +47,7 @@ Suggested format::
For example::
- Iris. Met Office. git@github.com:SciTools/iris.git 06-03-2013
+ Iris. Met Office. git@github.com:SciTools/iris.git 22-12-2022
.. _How to cite and describe software: https://software.ac.uk/how-cite-software
diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst
index fe9a5d63d2..56a2041bd3 100644
--- a/docs/src/userguide/cube_maths.rst
+++ b/docs/src/userguide/cube_maths.rst
@@ -5,8 +5,8 @@ Cube Maths
==========
-The section :doc:`navigating_a_cube` highlighted that
-every cube has a data attribute;
+The section :doc:`navigating_a_cube` highlighted that
+every cube has a data attribute;
this attribute can then be manipulated directly::
cube.data -= 273.15
@@ -37,7 +37,7 @@ Let's load some air temperature which runs from 1860 to 2100::
filename = iris.sample_data_path('E1_north_america.nc')
air_temp = iris.load_cube(filename, 'air_temperature')
-We can now get the first and last time slices using indexing
+We can now get the first and last time slices using indexing
(see :ref:`cube_indexing` for a reminder)::
t_first = air_temp[0, :, :]
@@ -50,8 +50,8 @@ We can now get the first and last time slices using indexing
t_first = air_temp[0, :, :]
t_last = air_temp[-1, :, :]
-And finally we can subtract the two.
-The result is a cube of the same size as the original two time slices,
+And finally we can subtract the two.
+The result is a cube of the same size as the original two time slices,
but with the data representing their difference:
>>> print(t_last - t_first)
@@ -70,8 +70,8 @@ but with the data representing their difference:
.. note::
- Notice that the coordinates "time" and "forecast_period" have been removed
- from the resultant cube;
+ Notice that the coordinates "time" and "forecast_period" have been removed
+ from the resultant cube;
this is because these coordinates differed between the two input cubes.
@@ -165,18 +165,24 @@ broadcasting behaviour::
>>> print(result.summary(True))
unknown / (K) (time: 240; latitude: 37; longitude: 49)
+
+.. seealso::
+
+ Relevant gallery example:
+ :ref:`sphx_glr_generated_gallery_general_plot_anomaly_log_colouring.py` (Anomaly)
+
Combining Multiple Phenomena to Form a New One
----------------------------------------------
-Combining cubes of potential-temperature and pressure we can calculate
+Combining cubes of potential-temperature and pressure we can calculate
the associated temperature using the equation:
.. math::
-
+
T = \theta (\frac{p}{p_0}) ^ {(287.05 / 1005)}
-Where :math:`p` is pressure, :math:`\theta` is potential temperature,
-:math:`p_0` is the potential temperature reference pressure
+Where :math:`p` is pressure, :math:`\theta` is potential temperature,
+:math:`p_0` is the potential temperature reference pressure
and :math:`T` is temperature.
First, let's load pressure and potential temperature cubes::
@@ -185,7 +191,7 @@ First, let's load pressure and potential temperature cubes::
phenomenon_names = ['air_potential_temperature', 'air_pressure']
pot_temperature, pressure = iris.load_cubes(filename, phenomenon_names)
-In order to calculate :math:`\frac{p}{p_0}` we can define a coordinate which
+In order to calculate :math:`\frac{p}{p_0}` we can define a coordinate which
represents the standard reference pressure of 1000 hPa::
import iris.coords
@@ -199,7 +205,7 @@ the :meth:`iris.coords.Coord.convert_units` method::
p0.convert_units(pressure.units)
-Now we can combine all of this information to calculate the air temperature
+Now we can combine all of this information to calculate the air temperature
using the equation above::
temperature = pot_temperature * ( (pressure / p0) ** (287.05 / 1005) )
@@ -213,12 +219,12 @@ The result could now be plotted using the guidance provided in the
.. only:: html
- A very similar example to this can be found in
+ A very similar example to this can be found in
:ref:`sphx_glr_generated_gallery_meteorology_plot_deriving_phenomena.py`.
.. only:: latex
- A very similar example to this can be found in the examples section,
+ A very similar example to this can be found in the examples section,
with the title "Deriving Exner Pressure and Air Temperature".
.. _cube_maths_combining_units:
@@ -243,7 +249,7 @@ unit (if ``a`` had units ``'m2'`` then ``a ** 0.5`` would result in a cube
with units ``'m'``).
Iris inherits units from `cf_units `_
-which in turn inherits from `UDUNITS `_.
+which in turn inherits from `UDUNITS `_.
As well as the units UDUNITS provides, cf units also provides the units
``'no-unit'`` and ``'unknown'``. A unit of ``'no-unit'`` means that the
associated data is not suitable for describing with a unit, cf units
diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst
index 980f1e132f..08297c2a51 100644
--- a/docs/src/userguide/cube_statistics.rst
+++ b/docs/src/userguide/cube_statistics.rst
@@ -4,6 +4,11 @@
Cube Statistics
===============
+.. seealso::
+
+ Relevant gallery example:
+ :ref:`sphx_glr_generated_gallery_general_plot_zonal_means.py` (Collapsing)
+
.. _cube-statistics-collapsing:
Collapsing Entire Data Dimensions
diff --git a/docs/src/userguide/glossary.rst b/docs/src/userguide/glossary.rst
new file mode 100644
index 0000000000..818ef0c7ad
--- /dev/null
+++ b/docs/src/userguide/glossary.rst
@@ -0,0 +1,210 @@
+.. _glossary:
+
+Glossary
+=============
+
+.. glossary::
+
+ Cartopy
+      A python package for producing maps and performing other geospatial
+      data analyses. Allows plotting on these maps, over a range of projections.
+
+ | **Related:** :term:`Matplotlib`
+ | **More information:** `CartoPy Site `_
+ |
+
+ CF Conventions
+ Rules for storing meteorological Climate and Forecast data in
+ :term:`NetCDF Format` files, defining a standard metadata format to
+ describe what the data is.
+      This also forms the data model on which Iris is based.
+
+ | **Related:** :term:`NetCDF Format`
+ | **More information:** `CF Conventions `_
+ |
+
+ Coordinate
+      A container for data points; it comes in three main flavours.
+
+ - Dimensional Coordinate -
+ A coordinate that describes a single data dimension of a cube.
+        It can only contain numerical values, in a sorted order (ascending
+        or descending).
+ - Auxiliary Coordinate -
+ A coordinate that can map to multiple data dimensions. Can
+ contain any type of data.
+ - Scalar Coordinate -
+ A coordinate that is not mapped to any data dimension, instead
+ representing the cube as a whole.
+
+ | **Related:** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Cube
+ Cubes are the main method of storing data in Iris. A cube can consist of:
+
+ - Array of :term:`Phenomenon` Data (Required)
+      - :term:`Coordinates <Coordinate>`
+ - :term:`Standard Name`
+ - :term:`Long Name`
+ - :term:`Unit`
+      - :term:`Cell Methods <Cell Method>`
+      - :term:`Coordinate Factories <Coordinate Factory>`
+
+ | **Related:** :term:`NumPy`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Cell Method
+      A cell method records that a cube's data has been derived from a past
+      statistical operation, such as a MEAN or SUM.
+
+ | **Related:** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Coordinate Factory
+ A coordinate factory derives coordinates (sometimes referred to as
+ derived coordinates) from the values of existing coordinates.
+ E.g. A hybrid height factory might use "height above sea level"
+ and "height at ground level" coordinate data to calculate a
+ "height above ground level" coordinate.
+
+ | **Related:** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+
+ Dask
+      A data analytics python library. Iris predominantly uses Dask arrays:
+      collections of NumPy-like arrays. The data is operated on in batches,
+      so that not all of it is in RAM at once.
+
+ | **Related:** :term:`Lazy Data` **|** :term:`NumPy`
+ | **More information:** :doc:`real_and_lazy_data`
+ |
+
+ Fields File (FF) Format
+ A meteorological file format, the output of the Unified Model.
+
+ | **Related:** :term:`GRIB Format`
+ **|** :term:`Post Processing (PP) Format` **|** :term:`NetCDF Format`
+ | **More information:** `Unified Model `_
+ |
+
+ GRIB Format
+ A WMO-standard meteorological file format.
+
+ | **Related:** :term:`Fields File (FF) Format`
+ **|** :term:`Post Processing (PP) Format` **|** :term:`NetCDF Format`
+ | **More information:** `GRIB 1 User Guide `_
+ **|** `GRIB 2 User Guide.pdf `_
+ |
+
+ Lazy Data
+      Data stored on the hard drive, and then temporarily loaded into RAM in
+      batches when needed. This allows for lower memory usage and faster
+      performance, thanks to parallel processing.
+
+ | **Related:** :term:`Dask` **|** :term:`Real Data`
+ | **More information:** :doc:`real_and_lazy_data`
+ |
+
+ Long Name
+      A name describing a :term:`phenomenon`, not limited to the
+      same restrictions as a :term:`standard name`.
+
+ | **Related:** :term:`Standard Name` **|** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Matplotlib
+ A python package for plotting and projecting data in a wide variety
+ of formats.
+
+      | **Related:** :term:`Cartopy` **|** :term:`NumPy`
+ | **More information:** `Matplotlib `_
+ |
+
+ Metadata
+ The information which describes a phenomenon.
+      Within Iris specifically, this is all the information which
+      distinguishes one phenomenon from another,
+      e.g. :term:`units <Unit>` or :term:`Cell Methods <Cell Method>`.
+
+ | **Related:** :term:`Phenomenon` **|** :term:`Cube`
+ | **More information:** :doc:`../further_topics/metadata`
+ |
+
+ NetCDF Format
+ A flexible file format for storing multi-dimensional array-like data.
+      When Iris loads this format, it also specifically recognises and
+      interprets data encoded according to the :term:`CF Conventions`.
+
+ | **Related:** :term:`Fields File (FF) Format`
+ **|** :term:`GRIB Format` **|** :term:`Post Processing (PP) Format`
+ | **More information:** `NetCDF-4 Python Git `_
+ |
+
+ NumPy
+ A mathematical Python library, predominantly based around
+ multi-dimensional arrays.
+
+ | **Related:** :term:`Dask` **|** :term:`Cube`
+ **|** :term:`Xarray`
+ | **More information:** `NumPy.org `_
+ |
+
+ Phenomenon
+ The primary data which is measured, usually within a cube, e.g.
+ air temperature.
+
+ | **Related:** :term:`Metadata`
+ **|** :term:`Standard Name` **|** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Post Processing (PP) Format
+      A meteorological file format, created from a post-processed
+      :term:`Fields File (FF) Format`.
+
+ | **Related:** :term:`GRIB Format` **|** :term:`NetCDF Format`
+ | **More information:** `PP Wikipedia Page `_
+ |
+
+ Real Data
+ Data that has been loaded into RAM, as opposed to sitting
+ on the hard drive.
+
+ | **Related:** :term:`Lazy Data` **|** :term:`NumPy`
+ | **More information:** :doc:`real_and_lazy_data`
+ |
+
+ Standard Name
+ A name describing a :term:`phenomenon`, one from a fixed list
+ defined at `CF Standard Names `_.
+
+ | **Related:** :term:`Long Name` **|** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Unit
+      The unit with which the :term:`phenomenon` is measured, e.g. m / sec.
+
+ | **Related:** :term:`Cube`
+ | **More information:** :doc:`iris_cubes`
+ |
+
+ Xarray
+      A python library for sophisticated labelled multi-dimensional operations.
+      It has a broader scope than Iris, as it is not focused on meteorological data.
+
+ | **Related:** :term:`NumPy`
+ | **More information:** `Xarray Documentation `_
+ |
+
+----
+
+`To top `_
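
A small sketch of the Lazy Data / Real Data distinction defined in the glossary
above, assuming only ``numpy``, ``dask`` and ``iris`` are available::

    import dask.array as da
    import numpy as np
    from iris.cube import Cube

    cube = Cube(da.from_array(np.arange(12.0).reshape(3, 4), chunks=(3, 2)))
    print(cube.has_lazy_data())  # True - nothing has been computed yet

    cube.data                    # accessing .data realises it into RAM
    print(cube.has_lazy_data())  # False - the cube now holds real data
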
diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst
index 08923e7662..fdd0c4d03e 100644
--- a/docs/src/userguide/index.rst
+++ b/docs/src/userguide/index.rst
@@ -35,6 +35,7 @@ they may serve as a useful reference for future exploration.
cube_maths
citation
code_maintenance
+ glossary
.. toctree::
diff --git a/docs/src/userguide/merge_and_concat.rst b/docs/src/userguide/merge_and_concat.rst
index 08c3ce9711..b521d49a59 100644
--- a/docs/src/userguide/merge_and_concat.rst
+++ b/docs/src/userguide/merge_and_concat.rst
@@ -253,6 +253,11 @@ which are described below.
Using CubeList.concatenate
==========================
+.. seealso::
+
+ Relevant gallery example:
+ :ref:`sphx_glr_generated_gallery_general_plot_projections_and_annotations.py` (Brief concatenating examples)
+
The :meth:`CubeList.concatenate ` method operates on a list
of cubes and returns a new :class:`~iris.cube.CubeList` containing the cubes
that have been concatenated.
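
A minimal sketch of the ``CubeList.concatenate`` call described above, joining
two synthetic cubes along an existing ``time`` dimension::

    import numpy as np
    from iris.coords import DimCoord
    from iris.cube import Cube, CubeList

    def time_chunk(first_hour):
        time = DimCoord(np.arange(first_hour, first_hour + 3, dtype=float),
                        standard_name="time", units="hours since 1970-01-01")
        return Cube(np.zeros(3), standard_name="air_temperature", units="K",
                    dim_coords_and_dims=[(time, 0)])

    result = CubeList([time_chunk(0), time_chunk(3)]).concatenate()
    print(result)  # a CubeList holding one cube with a 6-point time coordinate
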
diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst
index c5924a61c6..b4c16b094b 100644
--- a/docs/src/userguide/navigating_a_cube.rst
+++ b/docs/src/userguide/navigating_a_cube.rst
@@ -110,6 +110,10 @@ cube with the :attr:`Cube.cell_methods ` attribute:
print(cube.cell_methods)
+.. seealso::
+
+ Relevant gallery example:
+ :ref:`sphx_glr_generated_gallery_meteorology_plot_wind_barbs.py`
Accessing Coordinates on the Cube
---------------------------------
@@ -176,6 +180,10 @@ We can add and remove coordinates via :func:`Cube.add_dim_coord`_.
+the derived "pressure" coordinate for certain data [#f1]_ from the ECMWF.
.. [#f1] Where the level type is either 105 or 119, and where the
surface pressure has an ECMWF paramId of
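
A short sketch of inspecting cell methods and coordinates, as referenced by the
gallery link added above; the cube is synthetic rather than loaded from a file::

    import numpy as np
    import iris.analysis
    from iris.coords import DimCoord
    from iris.cube import Cube

    time = DimCoord(np.arange(4.0), standard_name="time",
                    units="hours since 2000-01-01")
    cube = Cube(np.arange(4.0), units="K", dim_coords_and_dims=[(time, 0)])

    mean = cube.collapsed("time", iris.analysis.MEAN)
    print(mean.cell_methods)          # records the "mean" operation over time
    print(mean.coord("time").points)  # coordinates remain accessible by name
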
diff --git a/docs/src/whatsnew/3.3.rst b/docs/src/whatsnew/3.3.rst
index 5812b79860..c2e47f298a 100644
--- a/docs/src/whatsnew/3.3.rst
+++ b/docs/src/whatsnew/3.3.rst
@@ -31,6 +31,36 @@ This document explains the changes made to Iris for this release
any issues or feature requests for improving Iris. Enjoy!
+v3.3.1 (29 Sep 2022)
+====================
+
+.. dropdown:: :opticon:`alert` v3.3.1 Patches
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+
+ The patches in this release of Iris include:
+
+ #. `@pp-mo`_ fixed the Jupyter notebook display of :class:`~iris.cube.CubeList`.
+ (:issue:`4973`, :pull:`4976`)
+
+ #. `@pp-mo`_ fixed a bug in NAME loaders where data with no associated statistic would
+ load as a cube with invalid cell-methods, which cannot be printed or saved to netcdf.
+ (:issue:`3288`, :pull:`4933`)
+
+ #. `@pp-mo`_ ensured that :data:`iris.cube.Cube.cell_methods` must always be an iterable
+ of :class:`iris.coords.CellMethod` objects (:pull:`4933`).
+
+ #. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's
+ change to default Transverse Mercator projection affects an Iris test.
+ See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more
+ details. (:pull:`4992`)
+
+ #. `@trexfeathers`_ introduced the ``netcdf4!=1.6.1`` pin to avoid a
+ problem with segfaults. (:pull:`4992`)
+
+
📢 Announcements
================
@@ -339,3 +369,5 @@ This document explains the changes made to Iris for this release
.. _PyData Sphinx Theme: https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html
.. _pytest: https://docs.pytest.org
.. _setuptools-scm: https://github.com/pypa/setuptools_scm
+.. _SciTools/cartopy@fcb784d: https://github.com/SciTools/cartopy/commit/fcb784daa65d95ed9a74b02ca292801c02bc4108
+.. _SciTools/cartopy@8860a81: https://github.com/SciTools/cartopy/commit/8860a8186d4dc62478e74c83f3b2b3e8f791372e
diff --git a/docs/src/whatsnew/3.4.rst b/docs/src/whatsnew/3.4.rst
new file mode 100644
index 0000000000..1ad676c049
--- /dev/null
+++ b/docs/src/whatsnew/3.4.rst
@@ -0,0 +1,288 @@
+.. include:: ../common_links.inc
+
+v3.4 (01 Dec 2022)
+******************
+
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
+
+
+.. dropdown:: :opticon:`report` v3.4.0 Release Highlights
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+ :open:
+
+ The highlights for this minor release of Iris include:
+
+ * We have **archived older Iris documentation** - everything before
+ ``v3.0.0`` - so older versions will soon no longer appear in search
+ engines. If you need this older documentation: please
+ see :ref:`iris_support`.
+ * We have added a :ref:`glossary` to the Iris documentation.
+ * We have completed work to make **Pandas interoperability** handle
+ n-dimensional :class:`~iris.cube.Cube`\s.
+ * We have **begun refactoring Iris' regridding**, which has already improved
+ performance and functionality, with more potential in future!
+ * We have made several other significant `🚀 Performance Enhancements`_.
+  * Please note that **Iris cannot currently work with the latest NetCDF4
+    releases**. The pin is set to ``netcdf4<1.6.1`` to avoid a problem with
+    segfaults (see `🔗 Dependencies`_ below).
+
+And finally, get in touch with us on :issue:`GitHub` if you have
+any issues or feature requests for improving Iris. Enjoy!
+
+
+📢 Announcements
+================
+
+#. Welcome to `@ESadek-MO`_, `@TTV-Intrepid`_ and `@hsteptoe`_, who made their
+ first contributions to Iris 🎉
+
+ .. _try_experimental_stratify:
+
+#. Do you enjoy `python-stratify`_? Did you know that Iris includes a
+ convenience for using `python-stratify`_ with :class:`~iris.cube.Cube`\s?
+ It has been 'experimental' for several years now, without receiving much
+ feedback, so it's **use it or lose it** time: please try out
+ :mod:`iris.experimental.stratify` and let us know what you think!
+
+
+✨ Features
+===========
+
+#. `@ESadek-MO`_ edited :func:`~iris.io.expand_filespecs` to allow expansion of
+ non-existing paths, and added expansion functionality to :func:`~iris.io.save`.
+ (:issue:`4772`, :pull:`4913`)
+
+#. `@trexfeathers`_ and `Julian Heming`_ added new mappings between CF
+ standard names and UK Met Office LBFC codes. (:pull:`4859`)
+
+#. `@pp-mo`_ changed the metadata of a face/edge-type
+ :class:`~iris.experimental.ugrid.mesh.MeshCoord`, to be same as the face/edge
+ coordinate in the mesh from which it takes its ``.points``. Previously, all MeshCoords
+ took their metadata from the node coord, but only a node-type MeshCoord now does
+ that. Also, the MeshCoord ``.var_name`` is now that of the underlying coord, whereas
+ previously this was always None. These changes make MeshCoord more like an ordinary
+ :class:`~iris.coords.AuxCoord`, which avoids some specific known usage problems.
+ (:issue:`4860`, :pull:`5020`)
+
+#. `@Esadek-MO`_ and `@trexfeathers`_ added dim coord
+ prioritisation to ``_get_lon_lat_coords()`` in :mod:`iris.analysis.cartography`.
+ This allows :func:`iris.analysis.cartography.area_weights` and
+ :func:`~iris.analysis.cartography.project` to handle cubes which contain
+ both dim and aux coords of the same type e.g. ``longitude`` and ``grid_longitude``.
+ (:issue:`3916`, :pull:`5029`).
+
+#. `@stephenworsley`_ added the ability to regrid derived coordinates with the
+ :obj:`~iris.analysis.PointInCell` regridding scheme. (:pull:`4807`)
+
+#. `@trexfeathers`_ made NetCDF loading more tolerant by enabling skipping of
+ :class:`~iris.coords.DimCoord`\s, :class:`~iris.coords.AuxCoord`\s,
+ :class:`~iris.coords.CellMeasure`\s and
+ :class:`~iris.coords.AncillaryVariable`\s if they cannot be added to a
+ :class:`~iris.cube.Cube` (e.g. due to CF non-compliance). This is done via
+ a new error class: :class:`~iris.exceptions.CannotAddError` (subclass of
+ :class:`ValueError`). (:pull:`5054`)
+
+#. `@pp-mo`_ implemented == and != comparisons for :class:`~iris.Constraint`\s.
+   A simple constraint is now == to another one constructed in the same way.
+   However, equality is limited for more complex cases: value-matching functions
+   must be the identical function object, and for &-combinations order is
+   significant, i.e. ``(c1 & c2) != (c2 & c1)``.
+   (:issue:`3616`, :pull:`3749`).
+
+#. `@hsteptoe`_ and `@trexfeathers`_ improved
+ :func:`iris.pandas.as_data_frame`\'s conversion of :class:`~iris.cube.Cube`\s to
+ :class:`~pandas.DataFrame`\s. This includes better handling of multiple
+ :class:`~iris.cube.Cube` dimensions, auxiliary coordinates and attribute
+ information. **Note:** the improvements are opt-in, by setting the
+ :obj:`iris.FUTURE.pandas_ndim` flag (see :class:`iris.Future` for more).
+ (:issue:`4526`, :pull:`4909`, :pull:`4669`, :pull:`5059`, :pull:`5074`)
+
+
+🐛 Bugs Fixed
+=============
+
+#. `@rcomer`_ and `@pp-mo`_ (reviewer) factored masking into the returned
+ sum-of-weights calculation from :obj:`~iris.analysis.SUM`. (:pull:`4905`)
+
+#. `@schlunma`_ fixed a bug which prevented using
+ :meth:`iris.cube.Cube.collapsed` on coordinates whose number of bounds
+ differs from 0 or 2. This enables the use of this method on mesh
+ coordinates. (:issue:`4672`, :pull:`4870`)
+
+#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) fixed the building of the CF
+ Standard Names module ``iris.std_names`` for the ``setup.py`` commands
+ ``develop`` and ``std_names``. (:issue:`4951`, :pull:`4952`)
+
+#. `@lbdreyer`_ and `@pp-mo`_ (reviewer) fixed the cube print out such that
+ scalar ancillary variables are displayed in a dedicated section rather than
+ being added to the vector ancillary variables section. Further, ancillary
+ variables and cell measures that map to a cube dimension of length 1 are now
+ included in the respective vector sections. (:pull:`4945`)
+
+#. `@rcomer`_ removed some old redundant code that prevented determining the
+ order of time cells. (:issue:`4697`, :pull:`4729`)
+
+#. `@stephenworsley`_ improved the accuracy of the error messages for
+ :meth:`~iris.cube.Cube.coord` when failing to find coordinates in the case where
+ a coordinate is given as the argument. Similarly, improved the error messages for
+ :meth:`~iris.cube.Cube.cell_measure` and :meth:`~iris.cube.Cube.ancillary_variable`.
+ (:issue:`4898`, :pull:`4928`)
+
+#. `@stephenworsley`_ fixed a bug which caused derived coordinates to be realised
+ after calling :meth:`iris.cube.Cube.aggregated_by`. (:issue:`3637`, :pull:`4947`)
+
+#. `@rcomer`_ corrected the ``standard_name`` mapping from UM stash code ``m01s30i311``
+ to indicate that this is the upward, rather than northward part of the flow.
+ (:pull:`5060`)
+
+#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) fixed an issue which prevented
+ uncompressed PP fields with additional trailing padded words in the field
+ data to be loaded and saved. (:pull:`5058`)
+
+#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) fixed the handling of data when
+ regridding with :class:`~iris.analysis.UnstructuredNearest` or calling
+ :func:`~iris.analysis.trajectory.interpolate` such that the data type and mask is
+ preserved. (:issue:`4463`, :pull:`5062`)
+
+
+💣 Incompatible Changes
+=======================
+
+#. `@trexfeathers`_ altered testing to accept new Dask copying behaviour from
+ `dask/dask#9555`_ - copies of a Dask array created using ``da.from_array()``
+ will all ``compute()`` to a shared identical array. So creating a
+ :class:`~iris.cube.Cube` using ``Cube(data=da.from_array(...``, then
+ using :class:`~iris.cube.Cube` :meth:`~iris.cube.Cube.copy`,
+ will produce two :class:`~iris.cube.Cube`\s that both return an identical
+ array when requesting :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.data`.
+ We do not expect this to affect typical user workflows but please get in
+ touch if you need help. (:pull:`5041`)
+
+#. `@trexfeathers`_ moved ``iris.experimental.animate.animate()`` to
+ :func:`iris.plot.animate`, in recognition of its successful use over several
+ years since introduction. (:pull:`5056`)
+
+
+🚀 Performance Enhancements
+===========================
+
+#. `@rcomer`_ and `@pp-mo`_ (reviewer) increased aggregation speed for
+ :obj:`~iris.analysis.SUM`, :obj:`~iris.analysis.COUNT` and
+ :obj:`~iris.analysis.PROPORTION` on real data. (:pull:`4905`)
+
+#. `@bouweandela`_ made :meth:`iris.coords.Coord.cells` faster for time
+ coordinates. This also affects :meth:`iris.cube.Cube.extract`,
+ :meth:`iris.cube.Cube.subset`, and :meth:`iris.coords.Coord.intersect`.
+ (:pull:`4969`)
+
+#. `@bouweandela`_ improved the speed of :meth:`iris.cube.Cube.subset` /
+ :meth:`iris.coords.Coord.intersect`.
+ (:pull:`4955`)
+
+#. `@stephenworsley`_ improved the speed of the :obj:`~iris.analysis.PointInCell`
+ regridding scheme. (:pull:`4807`)
+
+
+🔥 Deprecations
+===============
+
+#. `@hsteptoe`_ and `@trexfeathers`_ (reviewer) deprecated
+ :func:`iris.pandas.as_series` in favour of the new
+ :func:`iris.pandas.as_data_frame` - see `✨ Features`_ for more details.
+ (:pull:`4669`)
+
+
+🔗 Dependencies
+===============
+
+#. `@rcomer`_ introduced the ``dask >=2.26`` minimum pin, so that Iris can benefit
+ from Dask's support for `NEP13`_ and `NEP18`_. (:pull:`4905`)
+
+#. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's
+ change to default Transverse Mercator projection affects an Iris test.
+ See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more
+ details.
+ (:pull:`4968`)
+
+#. `@trexfeathers`_ introduced the ``netcdf4<1.6.1`` pin to avoid a problem
+ with segfaults. (:pull:`4968`, :pull:`5075`, :issue:`5016`)
+
+#. `@trexfeathers`_ updated the Matplotlib colormap registration in
+ :mod:`iris.palette` in response to a deprecation warning. Using the new
+ Matplotlib API also means a ``matplotlib>=3.5`` pin. (:pull:`4998`)
+
+#. See `💣 Incompatible Changes`_ for notes about `dask/dask#9555`_.
+
+
+📚 Documentation
+================
+
+#. `@ESadek-MO`_, `@TTV-Intrepid`_ and `@trexfeathers`_ added a gallery example for zonal
+ means plotted parallel to a cartographic plot. (:pull:`4871`)
+
+#. `@Esadek-MO`_ added a key-terms :ref:`glossary` page into the user guide. (:pull:`4902`)
+
+#. `@pp-mo`_ added a :ref:`code example `
+ for converting ORCA-gridded data to an unstructured cube. (:pull:`5013`)
+#. `@Esadek-MO`_ added links to relevant Gallery examples within the User Guide
+ to improve understanding. (:pull:`5009`)
+
+#. `@trexfeathers`_ changed the warning header for the **latest** documentation
+ to reference Read the Docs' built-in version switcher, instead of generating
+ its own independent links. (:pull:`5055`)
+
+#. `@tkknight`_ updated the links for the Iris documentation to v2.4 and
+ earlier to point to the archive of zip files instead. (:pull:`5064`)
+
+#. `@Esadek-MO`_ began adding notes at the bottom of function docstrings
+   to clarify whether or not the function preserves laziness. See :issue:`3292`
+   for the ongoing checklist. (:pull:`5066`)
+
+💼 Internal
+===========
+
+#. `@rcomer`_ removed the obsolete ``setUpClass`` method from Iris testing.
+ (:pull:`4927`)
+
+#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) removed support for
+ ``python setup.py test``, which is a deprecated approach to executing
+ package tests, see `pypa/setuptools#1684`_. Also performed assorted
+ ``setup.py`` script hygiene. (:pull:`4948`, :pull:`4949`, :pull:`4950`)
+
+#. `@pp-mo`_ split the module :mod:`iris.fileformats.netcdf` into separate
+ :mod:`~iris.fileformats.netcdf.loader` and :mod:`~iris.fileformats.netcdf.saver`
+ submodules, just to make the code easier to handle.
+
+#. `@trexfeathers`_ adapted the benchmark for importing :mod:`iris.palette` to
+ cope with new colormap behaviour in Matplotlib `v3.6`. (:pull:`4998`)
+
+#. `@rcomer`_ removed a now redundant workaround for an old matplotlib bug,
+ highlighted by :issue:`4090`. (:pull:`4999`)
+
+#. `@rcomer`_ added the ``show`` option to the documentation Makefiles, as a
+ convenient way for contributors to view their built documentation.
+ (:pull:`5000`)
+
+.. comment
+ Whatsnew author names (@github name) in alphabetical order. Note that,
+ core dev names are automatically included by the common_links.inc:
+
+.. _@TTV-Intrepid: https://github.com/TTV-Intrepid
+.. _Julian Heming: https://www.metoffice.gov.uk/research/people/julian-heming
+.. _@hsteptoe: https://github.com/hsteptoe
+
+
+.. comment
+ Whatsnew resources in alphabetical order:
+
+.. _NEP13: https://numpy.org/neps/nep-0013-ufunc-overrides.html
+.. _NEP18: https://numpy.org/neps/nep-0018-array-function-protocol.html
+.. _pypa/setuptools#1684: https://github.com/pypa/setuptools/issues/1684
+.. _SciTools/cartopy@fcb784d: https://github.com/SciTools/cartopy/commit/fcb784daa65d95ed9a74b02ca292801c02bc4108
+.. _SciTools/cartopy@8860a81: https://github.com/SciTools/cartopy/commit/8860a8186d4dc62478e74c83f3b2b3e8f791372e
+.. _dask/dask#9555: https://github.com/dask/dask/pull/9555
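
A sketch of the opt-in n-dimensional Pandas conversion highlighted in the notes
above; ``iris.FUTURE.pandas_ndim`` and ``iris.pandas.as_data_frame`` are named in
the release notes, while the cube itself is synthetic::

    import numpy as np
    import iris
    import iris.pandas
    from iris.coords import DimCoord
    from iris.cube import Cube

    lat = DimCoord([10.0, 20.0], standard_name="latitude", units="degrees")
    lon = DimCoord([0.0, 90.0, 180.0], standard_name="longitude", units="degrees")
    cube = Cube(np.arange(6.0).reshape(2, 3), units="K",
                dim_coords_and_dims=[(lat, 0), (lon, 1)])

    iris.FUTURE.pandas_ndim = True          # opt in to the new behaviour
    print(iris.pandas.as_data_frame(cube))  # converts the full 2-D cube (needs pandas)
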
diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst
index 8cff21f32f..005fac70c4 100644
--- a/docs/src/whatsnew/index.rst
+++ b/docs/src/whatsnew/index.rst
@@ -12,6 +12,7 @@ What's New in Iris
:hidden:
latest.rst
+ 3.4.rst
3.3.rst
3.2.rst
3.1.rst
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index d2eadb17d6..a38e426e6a 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -16,7 +16,7 @@ This document explains the changes made to Iris for this release
The highlights for this major/minor release of Iris include:
- * N/A
+ * We're so proud to fully support `@ed-hawkins`_ and `#ShowYourStripes`_ ❤️
And finally, get in touch with us on :issue:`GitHub` if you have
any issues or feature requests for improving Iris. Enjoy!
@@ -25,37 +25,22 @@ This document explains the changes made to Iris for this release
📢 Announcements
================
-#. Welcome to `@ESadek-MO`_ and `@TTV-Intrepid`_ who made their first contributions to Iris 🎉
+#. Congratulations to `@ESadek-MO`_ who has become a core developer for Iris! 🎉
+#. Welcome and congratulations to `@HGWright`_ for making his first contribution to Iris! 🎉
✨ Features
===========
-#. `@ESadek-MO`_ edited :func:`~iris.io.expand_filespecs` to allow expansion of
- non-existing paths, and added expansion functionality to :func:`~iris.io.save`.
- (:issue:`4772`, :pull:`4913`)
+#. `@bsherratt`_ added support for plugins - see the corresponding
+ :ref:`documentation page` for further information.
+ (:pull:`5144`)
🐛 Bugs Fixed
=============
-#. `@rcomer`_ and `@pp-mo`_ (reviewer) factored masking into the returned
- sum-of-weights calculation from :obj:`~iris.analysis.SUM`. (:pull:`4905`)
-
-#. `@schlunma`_ fixed a bug which prevented using
- :meth:`iris.cube.Cube.collapsed` on coordinates whose number of bounds
- differs from 0 or 2. This enables the use of this method on mesh
- coordinates. (:issue:`4672`, :pull:`4870`)
-
-#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) fixed the building of the CF
- Standard Names module ``iris.std_names`` for the ``setup.py`` commands
- ``develop`` and ``std_names``. (:issue:`4951`, :pull:`4952`)
-
-#. `@lbdreyer`_ and `@pp-mo`_ (reviewer) fixed the cube print out such that
- scalar ancillary variables are displayed in a dedicated section rather than
- being added to the vector ancillary variables section. Further, ancillary
- variables and cell measures that map to a cube dimension of length 1 are now
- included in the respective vector sections. (:pull:`4945`)
+#. N/A
💣 Incompatible Changes
@@ -67,9 +52,7 @@ This document explains the changes made to Iris for this release
🚀 Performance Enhancements
===========================
-#. `@rcomer`_ and `@pp-mo`_ (reviewer) increased aggregation speed for
- :obj:`~iris.analysis.SUM`, :obj:`~iris.analysis.COUNT` and
- :obj:`~iris.analysis.PROPORTION` on real data. (:pull:`4905`)
+#. N/A
🔥 Deprecations
@@ -81,53 +64,60 @@ This document explains the changes made to Iris for this release
🔗 Dependencies
===============
-#. `@rcomer`_ introduced the ``dask >=2.26`` minimum pin, so that Iris can benefit
- from Dask's support for `NEP13`_ and `NEP18`_. (:pull:`4905`)
-#. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's
- change to default Transverse Mercator projection affects an Iris test.
- See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more
- details.
- (:pull:`4968`)
-#. `@trexfeathers`_ introduced the ``netcdf4!=1.6.1`` pin to avoid a problem
- with segfaults. (:pull:`4968`)
+#. N/A
📚 Documentation
================
-#. `@ESadek-MO`_, `@TTV-Intrepid`_ and `@trexfeathers`_ added a gallery example for zonal
- means plotted parallel to a cartographic plot. (:pull:`4871`)
+#. `@rcomer`_ clarified instructions for updating gallery tests. (:pull:`5100`)
+#. `@tkknight`_ unpinned ``pydata-sphinx-theme`` and set the default to use
+   the light version (not dark) while we make the docs dark mode friendly.
+   (:pull:`5129`)
+
+#. `@jonseddon`_ updated the citation to a more recent version of Iris. (:pull:`5116`)
+
+#. `@rcomer`_ linked the :obj:`~iris.analysis.PERCENTILE` aggregator from the
+ :obj:`~iris.analysis.MEDIAN` docstring, noting that the former handles lazy
+ data. (:pull:`5128`)
+#. `@trexfeathers`_ updated the WSL link to Microsoft's latest documentation,
+ and removed an ECMWF link in the ``v1.0`` What's New that was failing the
+ linkcheck CI. (:pull:`5109`)
+
+#. `@trexfeathers`_ added a new top-level :doc:`/community/index` section,
+ as a one-stop place to find out about getting involved, and how we relate
+ to other projects. (:pull:`5025`)
+
+#. The **Iris community**, with help from the **Xarray community**, produced
+ the :doc:`/community/iris_xarray` page, highlighting the similarities and
+ differences between the two packages. (:pull:`5025`)
+
+#. `@bjlittle`_ added a new section to the `README.md`_ to show our support
+ for the outstanding work of `@ed-hawkins`_ et al for `#ShowYourStripes`_.
+ (:pull:`5141`)
+
+#. `@HGWright`_ fixed some typos in the Gitwash documentation. (:pull:`5145`)
💼 Internal
===========
-#. `@rcomer`_ removed the obsolete ``setUpClass`` method from Iris testing.
- (:pull:`4927`)
+#. `@fnattino`_ changed the order of ``ncgen`` arguments in the command to
+ create NetCDF files for testing (caused errors on OS X). (:pull:`5105`)
-#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) removed support for
- ``python setup.py test``, which is a deprecated approach to executing
- package tests, see `pypa/setuptools#1684`_. Also performed assorted
- ``setup.py`` script hygiene. (:pull:`4948`, :pull:`4949`, :pull:`4950`)
-
-#. `@pp-mo`_ split the module :mod:`iris.fileformats.netcdf` into separate
- :mod:`~iris.fileformats.netcdf.loader` and :mod:`~iris.fileformats.netcdf.saver`
- submodules, just to make the code easier to handle.
+#. `@rcomer`_ removed some old infrastructure that printed test timings.
+ (:pull:`5101`)
.. comment
Whatsnew author names (@github name) in alphabetical order. Note that,
core dev names are automatically included by the common_links.inc:
-.. _@TTV-Intrepid: https://github.com/TTV-Intrepid
-
-
+.. _@fnattino: https://github.com/fnattino
+.. _@ed-hawkins: https://github.com/ed-hawkins
.. comment
Whatsnew resources in alphabetical order:
-.. _NEP13: https://numpy.org/neps/nep-0013-ufunc-overrides.html
-.. _NEP18: https://numpy.org/neps/nep-0018-array-function-protocol.html
-.. _pypa/setuptools#1684: https://github.com/pypa/setuptools/issues/1684
-.. _SciTools/cartopy@fcb784d: https://github.com/SciTools/cartopy/commit/fcb784daa65d95ed9a74b02ca292801c02bc4108
-.. _SciTools/cartopy@8860a81: https://github.com/SciTools/cartopy/commit/8860a8186d4dc62478e74c83f3b2b3e8f791372e
\ No newline at end of file
+.. _#ShowYourStripes: https://showyourstripes.info/s/globe/
+.. _README.md: https://github.com/SciTools/iris#-----
diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template
index 661ee47f50..a0ce415a65 100644
--- a/docs/src/whatsnew/latest.rst.template
+++ b/docs/src/whatsnew/latest.rst.template
@@ -109,4 +109,3 @@ NOTE: section above is a template for bugfix patches
.. comment
Whatsnew resources in alphabetical order:
-
diff --git a/docs/src/why_iris.rst b/docs/src/why_iris.rst
index 63a515f68e..82b791b4bd 100644
--- a/docs/src/why_iris.rst
+++ b/docs/src/why_iris.rst
@@ -40,5 +40,4 @@ Interoperability with packages from the wider scientific Python ecosystem comes
from Iris' use of standard NumPy/dask arrays as its underlying data storage.
Iris is part of SciTools, for more information see https://scitools.org.uk/.
-For **Iris 2.4** and earlier documentation please see the
-:link-badge:`https://scitools.org.uk/iris/docs/v2.4.0/,"legacy documentation",cls=badge-info text-white`.
+For **Iris 2.4** and earlier documentation please see :ref:`iris_support`.
\ No newline at end of file
diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index b944f9b22f..38465472ee 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -91,6 +91,7 @@ def callback(cube, field, filename):
import contextlib
import glob
+import importlib
import itertools
import os.path
import pathlib
@@ -129,6 +130,7 @@ def callback(cube, field, filename):
"sample_data_path",
"save",
"site_configuration",
+ "use_plugin",
]
@@ -140,22 +142,28 @@ def callback(cube, field, filename):
class Future(threading.local):
"""Run-time configuration controller."""
- def __init__(self, datum_support=False):
+ def __init__(self, datum_support=False, pandas_ndim=False):
"""
A container for run-time options controls.
To adjust the values simply update the relevant attribute from
within your code. For example::
+ # example_future_flag is a fictional example.
iris.FUTURE.example_future_flag = False
If Iris code is executed with multiple threads, note the values of
these options are thread-specific.
- .. note::
-
- iris.FUTURE.example_future_flag does not exist. It is provided
- as an example.
+ Parameters
+ ----------
+ datum_support : bool, default=False
+ Opts in to loading coordinate system datum information from NetCDF
+ files into :class:`~iris.coord_systems.CoordSystem`\\ s, wherever
+ this information is present.
+ pandas_ndim : bool, default=False
+ See :func:`iris.pandas.as_data_frame` for details - opts in to the
+ newer n-dimensional behaviour.
"""
# The flag 'example_future_flag' is provided as a reference for the
@@ -166,13 +174,13 @@ def __init__(self, datum_support=False):
#
# self.__dict__['example_future_flag'] = example_future_flag
self.__dict__["datum_support"] = datum_support
+ self.__dict__["pandas_ndim"] = pandas_ndim
def __repr__(self):
-
# msg = ('Future(example_future_flag={})')
# return msg.format(self.example_future_flag)
- msg = "Future(datum_support={})"
- return msg.format(self.datum_support)
+ msg = "Future(datum_support={}, pandas_ndim={})"
+ return msg.format(self.datum_support, self.pandas_ndim)
# deprecated_options = {'example_future_flag': 'warning',}
deprecated_options = {}
@@ -211,14 +219,11 @@ def context(self, **kwargs):
statement, the previous state is restored.
For example::
+
+ # example_future_flag is a fictional example.
with iris.FUTURE.context(example_future_flag=False):
# ... code that expects some past behaviour
- .. note::
-
- iris.FUTURE.example_future_flag does not exist and is
- provided only as an example.
-
"""
# Save the current context
current_state = self.__dict__.copy()
@@ -467,3 +472,22 @@ def sample_data_path(*path_to_join):
"appropriate for general file access.".format(target)
)
return target
+
+
+def use_plugin(plugin_name):
+ """
+ Convenience function to import a plugin
+
+ For example::
+
+ use_plugin("my_plugin")
+
+ is equivalent to::
+
+ import iris.plugins.my_plugin
+
+ This is useful for plugins that are not used directly, but instead do all
+ their setup on import. In this case, style checkers would not know the
+ significance of the import statement and warn that it is an unused import.
+ """
+ importlib.import_module(f"iris.plugins.{plugin_name}")
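
A tiny sketch of the two ``iris.FUTURE`` flags documented in the revised
docstrings above; ``context()`` restores the previous values on exit::

    import iris

    print(iris.FUTURE)                    # Future(datum_support=False, pandas_ndim=False)
    with iris.FUTURE.context(datum_support=True):
        print(iris.FUTURE.datum_support)  # True, but only inside this block
    print(iris.FUTURE.datum_support)      # False again
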
diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py
index 4e23793e1d..bfd4865f56 100644
--- a/lib/iris/_constraints.py
+++ b/lib/iris/_constraints.py
@@ -131,6 +131,30 @@ def latitude_bands(cell):
_CoordConstraint(coord_name, coord_thing)
)
+ def __eq__(self, other):
+ # Equivalence is defined, but is naturally limited for any Constraints
+ # based on callables, i.e. "cube_func", or value functions for
+ # attributes/names/coords : These can only be == if they contain the
+ # *same* callable object (i.e. same object identity).
+ eq = (
+ type(other) == Constraint
+ and self._name == other._name
+ and self._cube_func == other._cube_func
+ and self._coord_constraints == other._coord_constraints
+ )
+ # NOTE: theoretically, you could compare coord constraints as a *set*,
+ # as order should not affect matching.
+ # Not totally sure, so for now let's not.
+ return eq
+
+ def __hash__(self):
+ # We want constraints to have hashes, so they can act as e.g.
+ # dictionary keys or tuple elements.
+ # So, we *must* provide this, as overloading '__eq__' automatically
+ # disables it.
+ # Just use basic object identity.
+ return id(self)
+
def __repr__(self):
args = []
if self._name:
@@ -218,6 +242,19 @@ def __init__(self, lhs, rhs, operator):
self.rhs = rhs_constraint
self.operator = operator
+ def __eq__(self, other):
+ eq = (
+ type(other) == ConstraintCombination
+ and self.lhs == other.lhs
+ and self.rhs == other.rhs
+ and self.operator == other.operator
+ )
+ return eq
+
+ def __hash__(self):
+ # Must re-define if you overload __eq__ : Use object identity.
+ return id(self)
+
def _coordless_match(self, cube):
return self.operator(
self.lhs._coordless_match(cube), self.rhs._coordless_match(cube)
@@ -261,6 +298,18 @@ def __repr__(self):
self._coord_thing,
)
+ def __eq__(self, other):
+ eq = (
+ type(other) == _CoordConstraint
+ and self.coord_name == other.coord_name
+ and self._coord_thing == other._coord_thing
+ )
+ return eq
+
+ def __hash__(self):
+ # Must re-define if you overload __eq__ : Use object identity.
+ return id(self)
+
def extract(self, cube):
"""
Returns the the column based indices of the given cube which
@@ -493,6 +542,17 @@ def __init__(self, **attributes):
self._attributes = attributes
super().__init__(cube_func=self._cube_func)
+ def __eq__(self, other):
+ eq = (
+ type(other) == AttributeConstraint
+ and self._attributes == other._attributes
+ )
+ return eq
+
+ def __hash__(self):
+ # Must re-define if you overload __eq__ : Use object identity.
+ return id(self)
+
def _cube_func(self, cube):
match = True
for name, value in self._attributes.items():
@@ -577,6 +637,17 @@ def __init__(
self._names = ("standard_name", "long_name", "var_name", "STASH")
super().__init__(cube_func=self._cube_func)
+ def __eq__(self, other):
+ eq = type(other) == NameConstraint and all(
+ getattr(self, attname) == getattr(other, attname)
+ for attname in self._names
+ )
+ return eq
+
+ def __hash__(self):
+ # Must re-define if you overload __eq__ : Use object identity.
+ return id(self)
+
def _cube_func(self, cube):
def matcher(target, value):
if callable(value):
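
A sketch of the constraint equality added above: identically constructed simple
constraints now compare equal, while ``&``-combinations remain order-sensitive::

    import iris

    c1 = iris.Constraint(name="air_temperature")
    c2 = iris.Constraint(name="air_temperature")
    height = iris.Constraint(height=1.5)

    print(c1 == c2)                        # True
    print((c1 & height) == (c1 & height))  # True
    print((c1 & height) == (height & c1))  # False - order is significant
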
diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py
index ac7ae34511..e0566fc8f2 100644
--- a/lib/iris/_lazy_data.py
+++ b/lib/iris/_lazy_data.py
@@ -39,7 +39,7 @@ def is_lazy_data(data):
"""
Return whether the argument is an Iris 'lazy' data array.
- At present, this means simply a Dask array.
+ At present, this means simply a :class:`dask.array.Array`.
We determine this by checking for a "compute" property.
"""
@@ -67,7 +67,8 @@ def _optimum_chunksize_internals(
* shape (tuple of int):
The full array shape of the target data.
* limit (int):
- The 'ideal' target chunk size, in bytes. Default from dask.config.
+ The 'ideal' target chunk size, in bytes. Default from
+ :mod:`dask.config`.
* dtype (np.dtype):
Numpy dtype of target data.
@@ -77,7 +78,7 @@ def _optimum_chunksize_internals(
.. note::
The purpose of this is very similar to
- `dask.array.core.normalize_chunks`, when called as
+ :func:`dask.array.core.normalize_chunks`, when called as
`(chunks='auto', shape, dtype=dtype, previous_chunks=chunks, ...)`.
Except, the operation here is optimised specifically for a 'c-like'
dimension order, i.e. outer dimensions first, as for netcdf variables.
@@ -174,13 +175,13 @@ def _optimum_chunksize(
def as_lazy_data(data, chunks=None, asarray=False):
"""
- Convert the input array `data` to a dask array.
+ Convert the input array `data` to a :class:`dask.array.Array`.
Args:
* data (array-like):
An indexable object with 'shape', 'dtype' and 'ndim' properties.
- This will be converted to a dask array.
+ This will be converted to a :class:`dask.array.Array`.
Kwargs:
@@ -192,7 +193,7 @@ def as_lazy_data(data, chunks=None, asarray=False):
Set to False (default) to pass passed chunks through unchanged.
Returns:
- The input array converted to a dask array.
+ The input array converted to a :class:`dask.array.Array`.
.. note::
The result chunk size is a multiple of 'chunks', if given, up to the
@@ -284,15 +285,16 @@ def multidim_lazy_stack(stack):
"""
Recursively build a multidimensional stacked dask array.
- This is needed because dask.array.stack only accepts a 1-dimensional list.
+    This is needed because :func:`dask.array.stack` only accepts a
+    1-dimensional list.
Args:
* stack:
- An ndarray of dask arrays.
+ An ndarray of :class:`dask.array.Array`.
Returns:
- The input array converted to a lazy dask array.
+ The input array converted to a lazy :class:`dask.array.Array`.
"""
if stack.ndim == 0:
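
A sketch of the behaviour documented in the revised ``iris._lazy_data``
docstrings; note this is a private module, used here only to illustrate the
wording above::

    import numpy as np
    from iris._lazy_data import as_lazy_data, is_lazy_data

    lazy = as_lazy_data(np.arange(6.0).reshape(2, 3))
    print(is_lazy_data(lazy))            # True - it is a dask.array.Array
    print(is_lazy_data(np.arange(6.0)))  # False - a plain NumPy array
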
diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py
index bc12080523..5ca5f31a8e 100644
--- a/lib/iris/_merge.py
+++ b/lib/iris/_merge.py
@@ -1418,6 +1418,7 @@ def _define_space(self, space, positions, indexes, function_matrix):
participates in a functional relationship.
"""
+
# Heuristic reordering of coordinate defintion indexes into
# preferred dimension order.
def axis_and_name(name):
diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py
index 11810f2901..f34cda1402 100644
--- a/lib/iris/analysis/__init__.py
+++ b/lib/iris/analysis/__init__.py
@@ -296,7 +296,6 @@ def _dimensional_metadata_comparison(*cubes, object_get=None):
# for coordinate groups
for cube, coords in zip(cubes, all_coords):
for coord in coords:
-
# if this coordinate has already been processed, then continue on
# to the next one
if id(coord) in processed_coords:
@@ -1778,7 +1777,7 @@ def interp_order(length):
.. seealso:: The :func:`~iris.analysis.PROPORTION` aggregator.
-This aggregator handles masked data.
+This aggregator handles masked data and lazy data.
"""
@@ -1808,7 +1807,7 @@ def interp_order(length):
result = precip_cube.collapsed('time', iris.analysis.MAX_RUN,
function=lambda values: values > 10)
-This aggregator handles masked data, which it treats as interrupting a run.
+This aggregator handles masked data, which it treats as interrupting a run, and lazy data.
"""
MAX_RUN.name = lambda: "max_run"
@@ -1826,7 +1825,7 @@ def interp_order(length):
result = cube.collapsed('longitude', iris.analysis.GMEAN)
-This aggregator handles masked data.
+This aggregator handles masked data, but NOT lazy data.
"""
@@ -1848,7 +1847,7 @@ def interp_order(length):
The harmonic mean is only valid if all data values are greater
than zero.
-This aggregator handles masked data.
+This aggregator handles masked data, but NOT lazy data.
"""
@@ -1914,7 +1913,8 @@ def interp_order(length):
result = cube.collapsed('longitude', iris.analysis.MEDIAN)
-This aggregator handles masked data.
+This aggregator handles masked data, but NOT lazy data. For lazy aggregation,
+please try :obj:`~.PERCENTILE`.
"""
@@ -1933,7 +1933,7 @@ def interp_order(length):
result = cube.collapsed('longitude', iris.analysis.MIN)
-This aggregator handles masked data.
+This aggregator handles masked data and lazy data.
"""
@@ -1952,7 +1952,7 @@ def interp_order(length):
result = cube.collapsed('longitude', iris.analysis.MAX)
-This aggregator handles masked data.
+This aggregator handles masked data and lazy data.
"""
@@ -1978,7 +1978,7 @@ def interp_order(length):
result = cube.collapsed('time', iris.analysis.PEAK)
-This aggregator handles masked data.
+This aggregator handles masked data but NOT lazy data.
"""
@@ -2058,7 +2058,7 @@ def interp_order(length):
.. seealso:: The :func:`~iris.analysis.COUNT` aggregator.
-This aggregator handles masked data.
+This aggregator handles masked data, but NOT lazy data.
"""
@@ -2084,7 +2084,7 @@ def interp_order(length):
result = cube.collapsed('longitude', iris.analysis.RMS)
-This aggregator handles masked data.
+This aggregator handles masked data and lazy data.
"""
@@ -2118,7 +2118,7 @@ def interp_order(length):
.. note::
- Lazy operation is supported, via :func:`dask.array.nanstd`.
+ Lazy operation is supported, via :func:`dask.array.std`.
This aggregator handles masked data.
@@ -2157,7 +2157,7 @@ def interp_order(length):
result = cube.rolling_window('time', iris.analysis.SUM,
len(weights), weights=weights)
-This aggregator handles masked data.
+This aggregator handles masked data and lazy data.
"""
@@ -2192,9 +2192,9 @@ def interp_order(length):
.. note::
- Lazy operation is supported, via :func:`dask.array.nanvar`.
+ Lazy operation is supported, via :func:`dask.array.var`.
-This aggregator handles masked data.
+This aggregator handles masked data and lazy data.
"""
@@ -2226,6 +2226,11 @@ def interp_order(length):
:func:`scipy.interpolate.interp1d` Defaults to "linear", which is
equivalent to alphap=0.5, betap=0.5 in `iris.analysis.PERCENTILE`
+Notes
+------
+This function does not maintain laziness when called; it realises data.
+See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
@@ -2619,6 +2624,11 @@ def clear_phenomenon_identity(cube):
Helper function to clear the standard_name, attributes, and
cell_methods of a cube.
+
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
cube.rename(None)
cube.attributes.clear()
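
A sketch of the lazy-versus-real aggregator behaviour stated in the updated
docstrings above (e.g. MAX handles lazy data, MEDIAN does not); the cube is
synthetic and built on a Dask array::

    import dask.array as da
    import numpy as np
    import iris.analysis
    from iris.coords import DimCoord
    from iris.cube import Cube

    lon = DimCoord(np.linspace(0, 350, 36), standard_name="longitude", units="degrees")
    cube = Cube(da.from_array(np.random.rand(36), chunks=18),
                units="K", dim_coords_and_dims=[(lon, 0)])

    print(cube.collapsed("longitude", iris.analysis.MAX).has_lazy_data())     # expect True
    print(cube.collapsed("longitude", iris.analysis.MEDIAN).has_lazy_data())  # expect False
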
diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py
index 8381185e58..3b728e9a43 100644
--- a/lib/iris/analysis/_area_weighted.py
+++ b/lib/iris/analysis/_area_weighted.py
@@ -11,7 +11,7 @@
from iris._lazy_data import map_complete_blocks
from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid
-from iris.analysis._regrid import RectilinearRegridder
+from iris.analysis._regrid import RectilinearRegridder, _create_cube
import iris.analysis.cartography
import iris.coord_systems
from iris.util import _meshgrid
@@ -853,7 +853,7 @@ def _calculate_regrid_area_weighted_weights(
cached_x_bounds = []
cached_x_indices = []
max_x_indices = 0
- for (x_0, x_1) in grid_x_bounds:
+ for x_0, x_1 in grid_x_bounds:
if grid_x_decreasing:
x_0, x_1 = x_1, x_0
x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1)
@@ -1111,18 +1111,32 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform(
)
# Wrap up the data as a Cube.
- regrid_callback = RectilinearRegridder._regrid
- new_cube = RectilinearRegridder._create_cube(
+
+ _regrid_callback = functools.partial(
+ RectilinearRegridder._regrid,
+ src_x_coord=src_x,
+ src_y_coord=src_y,
+ sample_grid_x=meshgrid_x,
+ sample_grid_y=meshgrid_y,
+ )
+ # TODO: investigate if an area weighted callback would be more appropriate.
+ # _regrid_callback = functools.partial(
+ # _regrid_area_weighted_array,
+ # weights_info=weights_info,
+ # index_info=index_info,
+ # mdtol=mdtol,
+ # )
+
+ def regrid_callback(*args, **kwargs):
+ _data, dims = args
+ return _regrid_callback(_data, *dims, **kwargs)
+
+ new_cube = _create_cube(
new_data,
src_cube,
- src_x_dim,
- src_y_dim,
- src_x,
- src_y,
- grid_x,
- grid_y,
- meshgrid_x,
- meshgrid_y,
+ [src_x_dim, src_y_dim],
+ [grid_x, grid_y],
+ 2,
regrid_callback,
)
diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py
index 0b52f54568..4cb449ae51 100644
--- a/lib/iris/analysis/_grid_angles.py
+++ b/lib/iris/analysis/_grid_angles.py
@@ -449,6 +449,11 @@ def rotate_grid_vectors(
Vector magnitudes will always be the same as the inputs.
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
u_out, v_out = (cube.copy() for cube in (u_cube, v_cube))
if not grid_angles_cube:
diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py
index 2a7dfa6e62..f5e89a9e51 100644
--- a/lib/iris/analysis/_interpolation.py
+++ b/lib/iris/analysis/_interpolation.py
@@ -268,7 +268,7 @@ def _account_for_circular(self, points, data):
"""
from iris.analysis.cartography import wrap_lons
- for (circular, modulus, index, dim, offset) in self._circulars:
+ for circular, modulus, index, dim, offset in self._circulars:
if modulus:
# Map all the requested values into the range of the source
# data (centred over the centre of the source data to allow
diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py
index 5c7439b0ce..f1891a48e4 100644
--- a/lib/iris/analysis/_regrid.py
+++ b/lib/iris/analysis/_regrid.py
@@ -11,7 +11,6 @@
import numpy as np
import numpy.ma as ma
from scipy.sparse import csc_matrix
-from scipy.sparse import diags as sparse_diags
from iris._lazy_data import map_complete_blocks
from iris.analysis._interpolation import (
@@ -21,7 +20,7 @@
snapshot_grid,
)
from iris.analysis._scipy_interpolate import _RegularGridInterpolator
-from iris.util import _meshgrid
+from iris.util import _meshgrid, guess_coord_axis
def _transform_xy_arrays(crs_from, x, y, crs_to):
@@ -52,18 +51,20 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare(
First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'.
Check inputs and calculate the sparse regrid matrix and related info.
- The 'regrid info' returned can be re-used over many 2d slices.
+ The 'regrid info' returned can be re-used over many cubes.
"""
- if src_cube.aux_factories:
- msg = "All source cube derived coordinates will be ignored."
- warnings.warn(msg)
# Get the source cube x and y 2D auxiliary coordinates.
sx, sy = src_cube.coord(axis="x"), src_cube.coord(axis="y")
# Get the target grid cube x and y dimension coordinates.
tx, ty = get_xy_dim_coords(grid_cube)
+ sl = [0] * grid_cube.ndim
+ sl[grid_cube.coord_dims(tx)[0]] = np.s_[:]
+ sl[grid_cube.coord_dims(ty)[0]] = np.s_[:]
+ grid_cube = grid_cube[tuple(sl)]
+
if sx.units != sy.units:
msg = (
"The source cube x ({!r}) and y ({!r}) coordinates must "
@@ -287,83 +288,108 @@ def _regrid_indices(cells, depth, points):
return regrid_info
-def _regrid_weighted_curvilinear_to_rectilinear__perform(
- src_cube, regrid_info
+def _curvilinear_to_rectilinear_regrid_data(
+ data,
+ dims,
+ regrid_info,
):
"""
- Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'.
+ Part of 'regrid_weighted_curvilinear_to_rectilinear' which acts on the data.
- Perform the prepared regrid calculation on a single 2d cube.
+ Perform the prepared regrid calculation on an array.
"""
- from iris.cube import Cube
-
sparse_matrix, sum_weights, rows, grid_cube = regrid_info
+ inds = list(range(-len(dims), 0))
+ data = np.moveaxis(data, dims, inds)
+ data_shape = data.shape
+ grid_size = np.prod([data_shape[ind] for ind in inds])
+
# Calculate the numerator of the weighted mean (M, 1).
- is_masked = ma.isMaskedArray(src_cube.data)
+ is_masked = ma.isMaskedArray(data)
+ sum_weights = None
if not is_masked:
- data = src_cube.data
+ data = data
else:
# Use raw data array
- data = src_cube.data.data
+ r_data = data.data
# Check if there are any masked source points to take account of.
- is_masked = np.ma.is_masked(src_cube.data)
+ is_masked = ma.is_masked(data)
if is_masked:
# Zero any masked source points so they add nothing in output sums.
- mask = src_cube.data.mask
- data[mask] = 0.0
+ mask = data.mask
+ r_data[mask] = 0.0
# Calculate a new 'sum_weights' to allow for missing source points.
# N.B. it is more efficient to use the original once-calculated
# sparse matrix, but in this case we can't.
# Hopefully, this post-multiplying by the validities is less costly
# than repeating the whole sparse calculation.
- valid_src_cells = ~mask.flat[:]
- src_cell_validity_factors = sparse_diags(
- np.array(valid_src_cells, dtype=int), 0
- )
- valid_weights = sparse_matrix * src_cell_validity_factors
- sum_weights = valid_weights.sum(axis=1).getA()
- # Work out where output cells are missing all contributions.
- # This allows for where 'rows' contains output cells that have no
- # data because of missing input points.
- zero_sums = sum_weights == 0.0
- # Make sure we can still divide by sum_weights[rows].
- sum_weights[zero_sums] = 1.0
+ valid_src_cells = ~mask.reshape(-1, grid_size)
+ sum_weights = valid_src_cells @ sparse_matrix.T
+ data = r_data
+ if sum_weights is None:
+ sum_weights = (
+ np.ones(data_shape).reshape(-1, grid_size) @ sparse_matrix.T
+ )
+ # Work out where output cells are missing all contributions.
+ # This allows for where 'rows' contains output cells that have no
+ # data because of missing input points.
+ zero_sums = sum_weights == 0.0
+ # Make sure we can still divide by sum_weights[rows].
+ sum_weights[zero_sums] = 1.0
# Calculate sum in each target cell, over contributions from each source
# cell.
- numerator = sparse_matrix * data.reshape(-1, 1)
-
- # Create a template for the weighted mean result.
- weighted_mean = ma.masked_all(numerator.shape, dtype=numerator.dtype)
-
- # Calculate final results in all relevant places.
- weighted_mean[rows] = numerator[rows] / sum_weights[rows]
- if is_masked:
- # Ensure masked points where relevant source cells were all missing.
- if np.any(zero_sums):
- # Make masked if it wasn't.
- weighted_mean = np.ma.asarray(weighted_mean)
- # Mask where contributing sums were zero.
- weighted_mean[zero_sums] = np.ma.masked
-
- # Construct the final regridded weighted mean cube.
+ numerator = data.reshape(-1, grid_size) @ sparse_matrix.T
+
+ weighted_mean = numerator / sum_weights
+ # Ensure masked points where relevant source cells were all missing.
+ weighted_mean = ma.asarray(weighted_mean)
+ if np.any(zero_sums):
+ # Mask where contributing sums were zero.
+ weighted_mean[zero_sums] = ma.masked
+
+ new_data_shape = list(data_shape)
+ for dim, length in zip(inds, grid_cube.shape):
+ new_data_shape[dim] = length
+ if len(dims) == 1:
+ new_data_shape.append(grid_cube.shape[1])
+ dims = (dims[0], dims[0] + 1)
+ if len(dims) > 2:
+ new_data_shape = new_data_shape[: 2 - len(dims)]
+ dims = dims[:2]
+
+ result = weighted_mean.reshape(new_data_shape)
+ result = np.moveaxis(result, [-2, -1], dims)
+ return result
+
+
+def _regrid_weighted_curvilinear_to_rectilinear__perform(
+ src_cube, regrid_info
+):
+ """
+ Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'.
+
+ Perform the prepared regrid calculation on a single cube.
+
+ """
+ dims = src_cube.coord_dims(
+ CurvilinearRegridder._get_horizontal_coord(src_cube, "x")
+ )
+ result_data = _curvilinear_to_rectilinear_regrid_data(
+ src_cube.data, dims, regrid_info
+ )
+ grid_cube = regrid_info[-1]
tx = grid_cube.coord(axis="x", dim_coords=True)
ty = grid_cube.coord(axis="y", dim_coords=True)
- (tx_dim,) = grid_cube.coord_dims(tx)
- (ty_dim,) = grid_cube.coord_dims(ty)
- dim_coords_and_dims = list(zip((ty.copy(), tx.copy()), (ty_dim, tx_dim)))
- cube = Cube(
- weighted_mean.reshape(grid_cube.shape),
- dim_coords_and_dims=dim_coords_and_dims,
+ regrid_callback = functools.partial(
+ _curvilinear_to_rectilinear_regrid_data, regrid_info=regrid_info
)
- cube.metadata = copy.deepcopy(src_cube.metadata)
-
- for coord in src_cube.coords(dimensions=()):
- cube.add_aux_coord(coord.copy())
-
- return cube
+ result = _create_cube(
+ result_data, src_cube, dims, (ty.copy(), tx.copy()), 2, regrid_callback
+ )
+ return result
class CurvilinearRegridder:
@@ -457,7 +483,7 @@ def __call__(self, src):
point-in-cell regridding.
"""
- from iris.cube import Cube, CubeList
+ from iris.cube import Cube
# Validity checks.
if not isinstance(src, Cube):
@@ -473,30 +499,18 @@ def __call__(self, src):
"The given cube is not defined on the same "
"source grid as this regridder."
)
-
- # Call the regridder function.
- # This includes repeating over any non-XY dimensions, because the
- # underlying routine does not support this.
- # FOR NOW: we will use cube.slices and merge to achieve this,
- # though that is not a terribly efficient method ...
- # TODO: create a template result cube and paste data slices into it,
- # which would be more efficient.
- result_slices = CubeList([])
- for slice_cube in src.slices(sx):
- if self._regrid_info is None:
- # Calculate the basic regrid info just once.
- self._regrid_info = (
- _regrid_weighted_curvilinear_to_rectilinear__prepare(
- slice_cube, self.weights, self._target_cube
- )
- )
- slice_result = (
- _regrid_weighted_curvilinear_to_rectilinear__perform(
- slice_cube, self._regrid_info
+ slice_cube = next(src.slices(sx))
+ if self._regrid_info is None:
+ # Calculate the basic regrid info just once.
+ self._regrid_info = (
+ _regrid_weighted_curvilinear_to_rectilinear__prepare(
+ slice_cube, self.weights, self._target_cube
)
)
- result_slices.append(slice_result)
- result = result_slices.merge_cube()
+ result = _regrid_weighted_curvilinear_to_rectilinear__perform(
+ src, self._regrid_info
+ )
+
return result
@@ -688,11 +702,23 @@ def _regrid(
# Prepare the result data array
shape = list(src_data.shape)
- assert shape[x_dim] == src_x_coord.shape[0]
- assert shape[y_dim] == src_y_coord.shape[0]
-
- shape[y_dim] = sample_grid_x.shape[0]
- shape[x_dim] = sample_grid_x.shape[1]
+ final_shape = shape.copy()
+ if x_dim is not None:
+ assert shape[x_dim] == src_x_coord.shape[0]
+ shape[x_dim] = sample_grid_x.shape[1]
+ final_shape[x_dim] = shape[x_dim]
+ else:
+ shape.append(1)
+ x_dim = len(shape) - 1
+ src_data = np.expand_dims(src_data, -1)
+ if y_dim is not None:
+ assert shape[y_dim] == src_y_coord.shape[0]
+ shape[y_dim] = sample_grid_x.shape[0]
+ final_shape[y_dim] = shape[y_dim]
+ else:
+ shape.append(1)
+ y_dim = len(shape) - 1
+ src_data = np.expand_dims(src_data, -1)
dtype = src_data.dtype
if method == "linear":
@@ -714,7 +740,11 @@ def _regrid(
if src_x_coord.points.size > 1
else False
)
- reverse_y = src_y_coord.points[0] > src_y_coord.points[1]
+ reverse_y = (
+ src_y_coord.points[0] > src_y_coord.points[1]
+ if src_y_coord.points.size > 1
+ else False
+ )
flip_index = [slice(None)] * src_data.ndim
if reverse_x:
src_x_coord = src_x_coord[::-1]
@@ -733,7 +763,7 @@ def _regrid(
# Slice out the first full 2D piece of data for construction of the
# interpolator.
- index = [0] * src_data.ndim
+ index = [0] * len(shape)
index[x_dim] = index[y_dim] = slice(None)
initial_data = src_data[tuple(index)]
if y_dim < x_dim:
@@ -808,166 +838,21 @@ def interpolate(data):
if ma.isMaskedArray(data) or mode.force_mask:
# NB. np.ma.getmaskarray returns an array of `False` if
# `src_subset` is not a masked array.
- src_mask = np.ma.getmaskarray(src_subset)
+ src_mask = ma.getmaskarray(src_subset)
interpolator.fill_value = mode.mask_fill_value
mask_fraction = interpolate(src_mask)
new_mask = mask_fraction > 0
- if np.ma.isMaskedArray(data):
+ if ma.isMaskedArray(data):
data.mask[tuple(index)] = new_mask
elif np.any(new_mask):
# Set mask=False to ensure we have an expanded mask array.
- data = np.ma.MaskedArray(data, mask=False)
+ data = ma.MaskedArray(data, mask=False)
data.mask[tuple(index)] = new_mask
+ data = data.reshape(final_shape)
return data
- @staticmethod
- def _create_cube(
- data,
- src,
- x_dim,
- y_dim,
- src_x_coord,
- src_y_coord,
- grid_x_coord,
- grid_y_coord,
- sample_grid_x,
- sample_grid_y,
- regrid_callback,
- ):
- """
- Return a new Cube for the result of regridding the source Cube onto
- the new grid.
-
- All the metadata and coordinates of the result Cube are copied from
- the source Cube, with two exceptions:
- - Grid dimension coordinates are copied from the grid Cube.
- - Auxiliary coordinates which span the grid dimensions are
- ignored, except where they provide a reference surface for an
- :class:`iris.aux_factory.AuxCoordFactory`.
-
- Args:
-
- * data:
- The regridded data as an N-dimensional NumPy array.
- * src:
- The source Cube.
- * x_dim:
- The X dimension within the source Cube.
- * y_dim:
- The Y dimension within the source Cube.
- * src_x_coord:
- The X :class:`iris.coords.DimCoord`.
- * src_y_coord:
- The Y :class:`iris.coords.DimCoord`.
- * grid_x_coord:
- The :class:`iris.coords.DimCoord` for the new grid's X
- coordinate.
- * grid_y_coord:
- The :class:`iris.coords.DimCoord` for the new grid's Y
- coordinate.
- * sample_grid_x:
- A 2-dimensional array of sample X values.
- * sample_grid_y:
- A 2-dimensional array of sample Y values.
- * regrid_callback:
- The routine that will be used to calculate the interpolated
- values of any reference surfaces.
-
- Returns:
- The new, regridded Cube.
-
- """
- from iris.cube import Cube
-
- #
- # XXX: At the moment requires to be a static method as used by
- # experimental regrid_area_weighted_rectilinear_src_and_grid
- #
- # Create a result cube with the appropriate metadata
- result = Cube(data)
- result.metadata = copy.deepcopy(src.metadata)
-
- # Copy across all the coordinates which don't span the grid.
- # Record a mapping from old coordinate IDs to new coordinates,
- # for subsequent use in creating updated aux_factories.
- coord_mapping = {}
-
- def copy_coords(src_coords, add_method):
- for coord in src_coords:
- dims = src.coord_dims(coord)
- if coord == src_x_coord:
- coord = grid_x_coord
- elif coord == src_y_coord:
- coord = grid_y_coord
- elif x_dim in dims or y_dim in dims:
- continue
- result_coord = coord.copy()
- add_method(result_coord, dims)
- coord_mapping[id(coord)] = result_coord
-
- copy_coords(src.dim_coords, result.add_dim_coord)
- copy_coords(src.aux_coords, result.add_aux_coord)
-
- def regrid_reference_surface(
- src_surface_coord,
- surface_dims,
- x_dim,
- y_dim,
- src_x_coord,
- src_y_coord,
- sample_grid_x,
- sample_grid_y,
- regrid_callback,
- ):
- # Determine which of the reference surface's dimensions span the X
- # and Y dimensions of the source cube.
- surface_x_dim = surface_dims.index(x_dim)
- surface_y_dim = surface_dims.index(y_dim)
- surface = regrid_callback(
- src_surface_coord.points,
- surface_x_dim,
- surface_y_dim,
- src_x_coord,
- src_y_coord,
- sample_grid_x,
- sample_grid_y,
- )
- surface_coord = src_surface_coord.copy(surface)
- return surface_coord
-
- # Copy across any AuxFactory instances, and regrid their reference
- # surfaces where required.
- for factory in src.aux_factories:
- for coord in factory.dependencies.values():
- if coord is None:
- continue
- dims = src.coord_dims(coord)
- if x_dim in dims and y_dim in dims:
- result_coord = regrid_reference_surface(
- coord,
- dims,
- x_dim,
- y_dim,
- src_x_coord,
- src_y_coord,
- sample_grid_x,
- sample_grid_y,
- regrid_callback,
- )
- result.add_aux_coord(result_coord, dims)
- coord_mapping[id(coord)] = result_coord
- try:
- result.add_aux_factory(factory.updated(coord_mapping))
- except KeyError:
- msg = (
- "Cannot update aux_factory {!r} because of dropped"
- " coordinates.".format(factory.name())
- )
- warnings.warn(msg)
- return result
-
def _check_units(self, coord):
from iris.coord_systems import GeogCS, RotatedGeogCS
@@ -1089,20 +974,168 @@ def __call__(self, src):
)
# Wrap up the data as a Cube.
- regrid_callback = functools.partial(
- self._regrid, method=self._method, extrapolation_mode="nan"
+ _regrid_callback = functools.partial(
+ self._regrid,
+ src_x_coord=src_x_coord,
+ src_y_coord=src_y_coord,
+ sample_grid_x=sample_grid_x,
+ sample_grid_y=sample_grid_y,
+ method=self._method,
+ extrapolation_mode="nan",
)
- result = self._create_cube(
+
+ def regrid_callback(*args, **kwargs):
+ _data, dims = args
+ return _regrid_callback(_data, *dims, **kwargs)
+
+ result = _create_cube(
data,
src,
- x_dim,
- y_dim,
- src_x_coord,
- src_y_coord,
- grid_x_coord,
- grid_y_coord,
- sample_grid_x,
- sample_grid_y,
+ [x_dim, y_dim],
+ [grid_x_coord, grid_y_coord],
+ 2,
regrid_callback,
)
return result
+
+
+def _create_cube(
+ data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback
+):
+ r"""
+ Return a new cube for the result of regridding.
+ Returned cube represents the result of regridding the source cube
+ onto the horizontal coordinates (e.g. latitude) of the target cube.
+ All the metadata and coordinates of the result cube are copied from
+ the source cube, with two exceptions:
+ - Horizontal coordinates are copied from the target cube.
+ - Auxiliary coordinates which span the grid dimensions are
+ ignored.
+ Parameters
+ ----------
+ data : array
+ The regridded data as an N-dimensional NumPy array.
+ src : cube
+ The source Cube.
+ src_dims : tuple of int
+ The dimensions of the X and Y coordinate within the source Cube.
+ tgt_coords : tuple of :class:`iris.coords.Coord`\\ 's
+ Either two 1D :class:`iris.coords.DimCoord`\\ 's, two 1D
+ :class:`iris.experimental.ugrid.DimCoord`\\ 's or two ND
+ :class:`iris.coords.AuxCoord`\\ 's representing the new grid's
+ X and Y coordinates.
+ num_tgt_dims : int
+ The number of dimensions that the `tgt_coords` span.
+ regrid_callback : callable
+ The routine that will be used to calculate the interpolated
+ values of any reference surfaces.
+ Returns
+ -------
+ cube
+ A new iris.cube.Cube instance.
+ """
+ from iris.coords import DimCoord
+ from iris.cube import Cube
+
+ result = Cube(data)
+
+ if len(src_dims) >= 2:
+ grid_dim_x, grid_dim_y = src_dims[:2]
+ elif len(src_dims) == 1:
+ grid_dim_x = src_dims[0]
+ grid_dim_y = grid_dim_x + 1
+
+ if num_tgt_dims == 1:
+ grid_dim_x = grid_dim_y = min(src_dims)
+ for tgt_coord, dim in zip(tgt_coords, (grid_dim_x, grid_dim_y)):
+ if len(tgt_coord.shape) == 1:
+ if isinstance(tgt_coord, DimCoord) and dim is not None:
+ result.add_dim_coord(tgt_coord, dim)
+ else:
+ result.add_aux_coord(tgt_coord, dim)
+ else:
+ result.add_aux_coord(tgt_coord, (grid_dim_y, grid_dim_x))
+
+ result.metadata = copy.deepcopy(src.metadata)
+
+ # Copy across all the coordinates which don't span the grid.
+ # Record a mapping from old coordinate IDs to new coordinates,
+ # for subsequent use in creating updated aux_factories.
+
+ coord_mapping = {}
+
+ def copy_coords(src_coords, add_method):
+ for coord in src_coords:
+ dims = src.coord_dims(coord)
+ if set(src_dims).intersection(set(dims)):
+ continue
+ if guess_coord_axis(coord) in ["X", "Y"]:
+ continue
+
+ def dim_offset(dim):
+ offset = sum(
+ [
+ d <= dim
+ for d in (grid_dim_x, grid_dim_y)
+ if d is not None
+ ]
+ )
+ if offset and num_tgt_dims == 1:
+ offset -= 1
+ offset -= sum([d <= dim for d in src_dims if d is not None])
+ return dim + offset
+
+ dims = [dim_offset(dim) for dim in dims]
+ result_coord = coord.copy()
+ # Add result_coord to the owner of add_method.
+ add_method(result_coord, dims)
+ coord_mapping[id(coord)] = result_coord
+
+ copy_coords(src.dim_coords, result.add_dim_coord)
+ copy_coords(src.aux_coords, result.add_aux_coord)
+
+ def regrid_reference_surface(
+ src_surface_coord,
+ surface_dims,
+ src_dims,
+ regrid_callback,
+ ):
+ # Determine which of the reference surface's dimensions span the X
+ # and Y dimensions of the source cube.
+ relative_surface_dims = [
+ surface_dims.index(dim) if dim is not None else None
+ for dim in src_dims
+ ]
+ surface = regrid_callback(
+ src_surface_coord.points,
+ relative_surface_dims,
+ )
+ surface_coord = src_surface_coord.copy(surface)
+ return surface_coord
+
+ # Copy across any AuxFactory instances, and regrid their reference
+ # surfaces where required.
+ for factory in src.aux_factories:
+ for coord in factory.dependencies.values():
+ if coord is None:
+ continue
+ dims = src.coord_dims(coord)
+ if set(src_dims).intersection(dims):
+ result_coord = regrid_reference_surface(
+ coord,
+ dims,
+ src_dims,
+ regrid_callback,
+ )
+ result.add_aux_coord(result_coord, dims)
+ coord_mapping[id(coord)] = result_coord
+ try:
+ result.add_aux_factory(factory.updated(coord_mapping))
+ except KeyError:
+ msg = (
+ "Cannot update aux_factory {!r} because of dropped"
+ " coordinates.".format(factory.name())
+ )
+ warnings.warn(msg)
+
+ return result
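
The RectilinearRegridder.__call__ change above (and the matching regrid_conservative change later in this patch) wraps the private regrid routine with functools.partial plus a thin adapter, so that only the data and its (x, y) dimensions travel through _create_cube's regrid_callback interface. A minimal stand-alone sketch of that pattern, using a made-up interp function rather than Iris internals:

    import functools

    def interp(data, x_dim, y_dim, *, grid_shape):
        # Stand-in for the real regrid routine: just report what it was given.
        return f"regrid {data!r} over dims ({x_dim}, {y_dim}) onto {grid_shape}"

    # Pre-bind everything that is fixed for this regridder instance ...
    _regrid_callback = functools.partial(interp, grid_shape=(10, 20))

    # ... and adapt the signature to the (data, dims) form _create_cube expects.
    def regrid_callback(data, dims, **kwargs):
        return _regrid_callback(data, *dims, **kwargs)

    print(regrid_callback("payload", (2, 1)))
    # regrid 'payload' over dims (2, 1) onto (10, 20)
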
diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py
index fc64249729..bfa070c7c7 100644
--- a/lib/iris/analysis/_scipy_interpolate.py
+++ b/lib/iris/analysis/_scipy_interpolate.py
@@ -225,7 +225,6 @@ def compute_interp_weights(self, xi, method=None):
prepared = (xi_shape, method) + self._find_indices(xi.T)
if method == "linear":
-
xi_shape, method, indices, norm_distances, out_of_bounds = prepared
# Allocate arrays for describing the sparse matrix.
diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py
index 4630f47967..75b7d86406 100644
--- a/lib/iris/analysis/calculus.py
+++ b/lib/iris/analysis/calculus.py
@@ -147,6 +147,12 @@ def cube_delta(cube, coord):
.. note:: Missing data support not yet implemented.
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
+
"""
# handle the case where a user passes a coordinate name
if isinstance(coord, str):
@@ -251,6 +257,11 @@ def differentiate(cube, coord_to_differentiate):
.. note:: Spherical differentiation does not occur in this routine.
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
# Get the delta cube in the required differential direction.
# This operation results in a copy of the original cube.
@@ -532,6 +543,12 @@ def curl(i_cube, j_cube, k_cube=None):
where phi is longitude, theta is latitude.
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
+
"""
# Get the vector quantity names.
# (i.e. ['easterly', 'northerly', 'vertical'])
@@ -577,7 +594,6 @@ def curl(i_cube, j_cube, k_cube=None):
horiz_cs, (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS)
)
if not spherical_coords:
-
# TODO Implement some mechanism for conforming to a common grid
dj_dx = _curl_differentiate(j_cube, x_coord)
prototype_diff = dj_dx
@@ -741,6 +757,12 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None):
#doctest: +SKIP
(['u', 'v', 'w'], 'wind')
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
+
"""
directional_names = (
("u", "v", "w"),
diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py
index f38e48354d..a8e90a63ad 100644
--- a/lib/iris/analysis/cartography.py
+++ b/lib/iris/analysis/cartography.py
@@ -66,6 +66,10 @@ def wrap_lons(lons, base, period):
>>> print(wrap_lons(np.array([185, 30, -200, 75]), -180, 360))
[-175. 30. 160. 75.]
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
# It is important to use 64bit floating precision when changing a floats
# numbers range.
@@ -169,20 +173,25 @@ def rotate_pole(lons, lats, pole_lon, pole_lat):
def _get_lon_lat_coords(cube):
- lat_coords = [
- coord for coord in cube.coords() if "latitude" in coord.name()
- ]
- lon_coords = [
- coord for coord in cube.coords() if "longitude" in coord.name()
- ]
+ def search_for_coord(coord_iterable, coord_name):
+ return [
+ coord for coord in coord_iterable if coord_name in coord.name()
+ ]
+
+ lat_coords = search_for_coord(
+ cube.dim_coords, "latitude"
+ ) or search_for_coord(cube.coords(), "latitude")
+ lon_coords = search_for_coord(
+ cube.dim_coords, "longitude"
+ ) or search_for_coord(cube.coords(), "longitude")
if len(lat_coords) > 1 or len(lon_coords) > 1:
raise ValueError(
- "Calling `_get_lon_lat_coords` with multiple lat or lon coords"
+ "Calling `_get_lon_lat_coords` with multiple same-type (i.e. dim/aux) lat or lon coords"
" is currently disallowed"
)
lat_coord = lat_coords[0]
lon_coord = lon_coords[0]
- return (lon_coord, lat_coord)
+ return lon_coord, lat_coord
def _xy_range(cube, mode=None):
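
The _get_lon_lat_coords change above relies on an empty list being falsy, so the dim-coord search wins whenever it finds anything and the search over all coords is only a fallback. A small stand-alone illustration with plain lists rather than cubes:

    def search(names, wanted):
        return [name for name in names if wanted in name]

    dim_coords = ["grid_latitude"]
    all_coords = ["grid_latitude", "latitude"]

    # Dim coords are preferred; the aux "latitude" is only considered when no
    # dim coord matches, because an empty list is falsy.
    lat_coords = search(dim_coords, "latitude") or search(all_coords, "latitude")
    print(lat_coords)  # ['grid_latitude']
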
@@ -266,6 +275,10 @@ def get_xy_grids(cube):
x, y = get_xy_grids(cube)
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y")
@@ -294,6 +307,11 @@ def get_xy_contiguous_bounded_grids(cube):
xs, ys = get_xy_contiguous_bounded_grids(cube)
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y")
@@ -493,6 +511,10 @@ def cosine_latitude_weights(cube):
cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
weights = np.sqrt(cosine_latitude_weights(cube))
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
# Find all latitude coordinates, we want one and only one.
lat_coords = [
@@ -578,6 +600,11 @@ def project(cube, target_proj, nx=None, ny=None):
An instance of :class:`iris.cube.Cube` and a list describing the
extent of the projection.
+ .. note::
+
+ If there are both dim and aux latitude-longitude coordinates, only
+ the dim coordinates will be used.
+
.. note::
This function assumes global data and will if necessary extrapolate
@@ -591,6 +618,11 @@ def project(cube, target_proj, nx=None, ny=None):
resulting nearest neighbour values. If masked, the value in the
resulting cube is set to 0.
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
.. warning::
This function uses a nearest neighbour approach rather than any form
@@ -1065,6 +1097,11 @@ def rotate_winds(u_cube, v_cube, target_cs):
The names of the output cubes are those of the inputs, prefixed with
'transformed\_' (e.g. 'transformed_x_wind').
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
.. warning::
Conversion between rotated-pole and non-rotated systems can be
diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py
index a412a26ebc..b246b518d4 100644
--- a/lib/iris/analysis/geometry.py
+++ b/lib/iris/analysis/geometry.py
@@ -160,6 +160,11 @@ def geometry_area_weights(cube, geometry, normalize=False):
calculation might be wrong. In this case, a UserWarning will
be issued.
+ .. note::
+
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
Args:
* cube (:class:`iris.cube.Cube`):
diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py
index 468847bca2..09a02ad51c 100644
--- a/lib/iris/analysis/maths.py
+++ b/lib/iris/analysis/maths.py
@@ -115,6 +115,11 @@ def abs(cube, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(np.abs, cube.dtype, in_place=in_place)
@@ -160,6 +165,11 @@ def intersection_of_cubes(cube, other_cube):
intersections = cubes.extract_overlapping(coords)
cube1, cube2 = (intersections[0], intersections[1])
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
wmsg = (
"iris.analysis.maths.intersection_of_cubes has been deprecated and will "
@@ -243,6 +253,11 @@ def add(cube, other, dim=None, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(
@@ -292,6 +307,11 @@ def subtract(cube, other, dim=None, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(
@@ -383,6 +403,10 @@ def multiply(cube, other, dim=None, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
_assert_is_cube(cube)
@@ -456,6 +480,10 @@ def divide(cube, other, dim=None, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
_assert_is_cube(cube)
@@ -519,6 +547,10 @@ def exponentiate(cube, exponent, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(
@@ -567,6 +599,11 @@ def exp(cube, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(np.exp, cube.dtype, in_place=in_place)
@@ -593,6 +630,11 @@ def log(cube, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(np.log, cube.dtype, in_place=in_place)
@@ -623,6 +665,11 @@ def log2(cube, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(np.log2, cube.dtype, in_place=in_place)
@@ -649,6 +696,11 @@ def log10(cube, in_place=False):
Returns:
An instance of :class:`iris.cube.Cube`.
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(np.log10, cube.dtype, in_place=in_place)
@@ -703,6 +755,12 @@ def apply_ufunc(
cube = apply_ufunc(numpy.sin, cube, in_place=True)
+ .. note::
+
+ This function maintains laziness when called; it does not realise data.
+ This depends on the `ufunc` argument being a NumPy operation that is
+ compatible with lazy evaluation.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
if not isinstance(ufunc, np.ufunc):
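
The laziness notes added throughout iris.analysis.maths can be verified directly with Cube.has_lazy_data(). A minimal sketch, assuming only that iris and dask are installed:

    import dask.array as da
    import iris.cube
    from iris.analysis.maths import add

    cube = iris.cube.Cube(da.zeros((3, 4)), long_name="example")
    print(cube.has_lazy_data())    # True - the data is still a dask array

    result = add(cube, 1)
    print(result.has_lazy_data())  # True - the operation stayed lazy
    print(cube.has_lazy_data())    # True - the input was not realised
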
diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py
index 946ae1cb2c..24f7a9dede 100644
--- a/lib/iris/analysis/trajectory.py
+++ b/lib/iris/analysis/trajectory.py
@@ -85,7 +85,6 @@ def __init__(self, waypoints, sample_count=10):
cur_seg = segments[cur_seg_i]
len_accum = cur_seg.length
for p in range(self.sample_count):
-
# calculate the sample position along our total length
sample_at_len = p * sample_step
@@ -216,6 +215,10 @@ def interpolate(cube, sample_points, method=None):
('longitude', [-60, -50, -40])]
interpolated_cube = interpolate(cube, sample_points)
+ Notes
+ ------
+ This function does not maintain laziness when called; it realises data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
from iris.analysis import Linear
@@ -443,21 +446,7 @@ def interpolate(cube, sample_points, method=None):
]
# Apply the fancy indexing to get all the result data points.
- source_data = source_data[tuple(fancy_source_indices)]
-
- # "Fix" problems with missing datapoints producing odd values
- # when copied from a masked into an unmasked array.
- # TODO: proper masked data handling.
- if np.ma.isMaskedArray(source_data):
- # This is **not** proper mask handling, because we cannot produce a
- # masked result, but it ensures we use a "filled" version of the
- # input in this case.
- source_data = source_data.filled()
- new_cube.data[:] = source_data
- # NOTE: we assign to "new_cube.data[:]" and *not* just "new_cube.data",
- # because the existing code produces a default dtype from 'np.empty'
- # instead of preserving the input dtype.
- # TODO: maybe this should be fixed -- i.e. to preserve input dtype ??
+ new_cube.data = source_data[tuple(fancy_source_indices)]
# Fill in the empty squashed (non derived) coords.
column_coords = [
diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py
index 8ec39bb4b1..cb3149fe58 100644
--- a/lib/iris/common/metadata.py
+++ b/lib/iris/common/metadata.py
@@ -969,6 +969,7 @@ def _combine_lenient(self, other):
A list of combined metadata member values.
"""
+
# Perform "strict" combination for "coord_system" and "climatological".
def func(field):
left = getattr(self, field)
@@ -1024,6 +1025,7 @@ def _difference_lenient(self, other):
A list of difference metadata member values.
"""
+
# Perform "strict" difference for "coord_system" and "climatological".
def func(field):
left = getattr(self, field)
diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py
index 72019b4b87..698b4828f1 100644
--- a/lib/iris/coord_categorisation.py
+++ b/lib/iris/coord_categorisation.py
@@ -90,6 +90,7 @@ def vectorised_fn(*args):
# coordinates only
#
+
# Private "helper" function
def _pt_date(coord, time):
"""
diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py
index 802571925e..edf0c1871b 100644
--- a/lib/iris/coord_systems.py
+++ b/lib/iris/coord_systems.py
@@ -478,7 +478,6 @@ def datum(self, value):
@classmethod
def from_datum(cls, datum, longitude_of_prime_meridian=None):
-
crs = super().__new__(cls)
crs._semi_major_axis = None
@@ -949,7 +948,6 @@ def __init__(
false_northing=None,
ellipsoid=None,
):
-
"""
Constructs a Geostationary coord system.
diff --git a/lib/iris/coords.py b/lib/iris/coords.py
index d0d471a634..91bb786ae8 100644
--- a/lib/iris/coords.py
+++ b/lib/iris/coords.py
@@ -18,7 +18,6 @@
import warnings
import zlib
-import cftime
import dask.array as da
import numpy as np
import numpy.ma as ma
@@ -1345,7 +1344,14 @@ def __add__(self, mod):
return Cell(point, bound)
def __hash__(self):
- return super().__hash__()
+ # See __eq__ for the definition of when two cells are equal.
+ if self.bound is None:
+ return hash(self.point)
+ bound = self.bound
+ rbound = bound[::-1]
+ if rbound < bound:
+ bound = rbound
+ return hash((self.point, bound))
def __eq__(self, other):
"""
@@ -1411,16 +1417,6 @@ def __common_cmp__(self, other, operator_method):
):
raise ValueError("Unexpected operator_method")
- # Prevent silent errors resulting from missing cftime
- # behaviour.
- if isinstance(other, cftime.datetime) or (
- isinstance(self.point, cftime.datetime)
- and not isinstance(other, iris.time.PartialDateTime)
- ):
- raise TypeError(
- "Cannot determine the order of " "cftime.datetime objects"
- )
-
if isinstance(other, Cell):
# Cell vs Cell comparison for providing a strict sort order
if self.bound is None:
@@ -1485,19 +1481,7 @@ def __common_cmp__(self, other, operator_method):
else:
me = max(self.bound)
- # Work around to handle cftime.datetime comparison, which
- # doesn't return NotImplemented on failure in some versions of the
- # library
- try:
- result = operator_method(me, other)
- except TypeError:
- rop = {
- operator.lt: operator.gt,
- operator.gt: operator.lt,
- operator.le: operator.ge,
- operator.ge: operator.le,
- }[operator_method]
- result = rop(other, me)
+ result = operator_method(me, other)
return result
@@ -1895,7 +1879,22 @@ def cells(self):
...
"""
- return _CellIterator(self)
+ if self.ndim != 1:
+ raise iris.exceptions.CoordinateMultiDimError(self)
+
+ points = self.points
+ bounds = self.bounds
+ if self.units.is_time_reference():
+ points = self.units.num2date(points)
+ if self.has_bounds():
+ bounds = self.units.num2date(bounds)
+
+ if self.has_bounds():
+ for point, bound in zip(points, bounds):
+ yield Cell(point, bound)
+ else:
+ for point in points:
+ yield Cell(point)
def _sanity_check_bounds(self):
if self.ndim == 1:
@@ -2382,18 +2381,16 @@ def intersect(self, other, return_indices=False):
)
raise ValueError(msg)
- # Cache self.cells for speed. We can also use the index operation on a
- # list conveniently.
- self_cells = [cell for cell in self.cells()]
+ # Cache self.cells for speed. We can also use the dict for fast index
+ # lookup.
+ self_cells = {cell: idx for idx, cell in enumerate(self.cells())}
# Maintain a list of indices on self for which cells exist in both self
# and other.
self_intersect_indices = []
for cell in other.cells():
- try:
- self_intersect_indices.append(self_cells.index(cell))
- except ValueError:
- pass
+ if cell in self_cells:
+ self_intersect_indices.append(self_cells[cell])
if return_indices is False and self_intersect_indices == []:
raise ValueError(
@@ -2849,7 +2846,6 @@ def _new_bounds_requirements(self, bounds):
n_bounds = bounds.shape[-1]
n_points = bounds.shape[0]
if n_points > 1:
-
directions = set()
for b_index in range(n_bounds):
monotonic, direction = iris.util.monotonic(
@@ -3137,22 +3133,6 @@ def xml_element(self, doc):
return cellMethod_xml_element
-# See Coord.cells() for the description/context.
-class _CellIterator(Iterator):
- def __init__(self, coord):
- self._coord = coord
- if coord.ndim != 1:
- raise iris.exceptions.CoordinateMultiDimError(coord)
- self._indices = iter(range(coord.shape[0]))
-
- def __next__(self):
- # NB. When self._indices runs out it will raise StopIteration for us.
- i = next(self._indices)
- return self._coord.cell(i)
-
- next = __next__
-
-
# See ExplicitCoord._group() for the description/context.
class _GroupIterator(Iterator):
def __init__(self, points):
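
The new Cell.__hash__ above normalises the bound order so that hashing agrees with the symmetric __eq__, which is exactly what makes the {cell: index} cache in Coord.intersect valid. A generic sketch of the idea, using a toy class rather than Iris's Cell:

    class Interval:
        """Toy value type: equal regardless of bound order, hashed to match."""

        def __init__(self, point, bound=None):
            self.point = point
            self.bound = tuple(bound) if bound is not None else None

        def __eq__(self, other):
            if self.bound is None or other.bound is None:
                return self.point == other.point and self.bound == other.bound
            return self.point == other.point and (
                self.bound == other.bound or self.bound == other.bound[::-1]
            )

        def __hash__(self):
            if self.bound is None:
                return hash(self.point)
            bound = min(self.bound, self.bound[::-1])  # canonical order
            return hash((self.point, bound))

    a = Interval(1.5, (1, 2))
    b = Interval(1.5, (2, 1))
    print(a == b, hash(a) == hash(b))  # True True
    print({a: 0}.get(b))               # 0 - dict lookup works either way round
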
diff --git a/lib/iris/cube.py b/lib/iris/cube.py
index 8879ade621..abe37c35fb 100644
--- a/lib/iris/cube.py
+++ b/lib/iris/cube.py
@@ -185,6 +185,12 @@ def _assert_is_cube(obj):
)
raise ValueError(msg)
+ def _repr_html_(self):
+ from iris.experimental.representation import CubeListRepresentation
+
+ representer = CubeListRepresentation(self)
+ return representer.repr_html()
+
# TODO #370 Which operators need overloads?
def __add__(self, other):
@@ -878,7 +884,8 @@ def __init__(
This object defines the shape of the cube and the phenomenon
value in each cell.
- ``data`` can be a dask array, a NumPy array, a NumPy array
+ ``data`` can be a :class:`dask.array.Array`, a
+ :class:`numpy.ndarray`, a NumPy array
subclass (such as :class:`numpy.ma.MaskedArray`), or
array_like (as described in :func:`numpy.asarray`).
@@ -1152,7 +1159,9 @@ def add_aux_coord(self, coord, data_dims=None):
"""
if self.coords(coord): # TODO: just fail on duplicate object
- raise ValueError("Duplicate coordinates are not permitted.")
+ raise iris.exceptions.CannotAddError(
+ "Duplicate coordinates are not permitted."
+ )
self._add_unique_aux_coord(coord, data_dims)
def _check_multi_dim_metadata(self, metadata, data_dims):
@@ -1172,7 +1181,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims):
len(data_dims), metadata.ndim, metadata.name()
)
)
- raise ValueError(msg)
+ raise iris.exceptions.CannotAddError(msg)
# Check compatibility with the shape of the data
for i, dim in enumerate(data_dims):
if metadata.shape[i] != self.shape[dim]:
@@ -1180,7 +1189,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims):
"Unequal lengths. Cube dimension {} => {};"
" metadata {!r} dimension {} => {}."
)
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
msg.format(
dim,
self.shape[dim],
@@ -1192,7 +1201,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims):
elif metadata.shape != (1,):
msg = "Missing data dimensions for multi-valued {} {!r}"
msg = msg.format(metadata.__class__.__name__, metadata.name())
- raise ValueError(msg)
+ raise iris.exceptions.CannotAddError(msg)
return data_dims
def _add_unique_aux_coord(self, coord, data_dims):
@@ -1206,7 +1215,7 @@ def _add_unique_aux_coord(self, coord, data_dims):
"cube {item} of {ownval!r}."
)
if coord.mesh != mesh:
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
msg.format(
item="mesh",
coord=coord,
@@ -1216,7 +1225,7 @@ def _add_unique_aux_coord(self, coord, data_dims):
)
location = self.location
if coord.location != location:
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
msg.format(
item="location",
coord=coord,
@@ -1226,7 +1235,7 @@ def _add_unique_aux_coord(self, coord, data_dims):
)
mesh_dims = (self.mesh_dim(),)
if data_dims != mesh_dims:
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
msg.format(
item="mesh dimension",
coord=coord,
@@ -1266,7 +1275,9 @@ def coordsonly(coords_and_dims):
ref_coord = aux_factory.dependencies[dependency]
if ref_coord is not None and ref_coord not in cube_coords:
msg = "{} coordinate for factory is not present on cube {}"
- raise ValueError(msg.format(ref_coord.name(), self.name()))
+ raise iris.exceptions.CannotAddError(
+ msg.format(ref_coord.name(), self.name())
+ )
self._aux_factories.append(aux_factory)
def add_cell_measure(self, cell_measure, data_dims=None):
@@ -1293,7 +1304,9 @@ def add_cell_measure(self, cell_measure, data_dims=None):
"""
if self.cell_measures(cell_measure):
- raise ValueError("Duplicate cell_measures are not permitted.")
+ raise iris.exceptions.CannotAddError(
+ "Duplicate cell_measures are not permitted."
+ )
data_dims = self._check_multi_dim_metadata(cell_measure, data_dims)
self._cell_measures_and_dims.append((cell_measure, data_dims))
self._cell_measures_and_dims.sort(
@@ -1321,7 +1334,9 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None):
"""
if self.ancillary_variables(ancillary_variable):
- raise ValueError("Duplicate ancillary variables not permitted")
+ raise iris.exceptions.CannotAddError(
+ "Duplicate ancillary variables not permitted"
+ )
data_dims = self._check_multi_dim_metadata(
ancillary_variable, data_dims
@@ -1352,13 +1367,13 @@ def add_dim_coord(self, dim_coord, data_dim):
"""
if self.coords(dim_coord):
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
"The coordinate already exists on the cube. "
"Duplicate coordinates are not permitted."
)
# Check dimension is available
if self.coords(dimensions=data_dim, dim_coords=True):
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
"A dim_coord is already associated with "
"dimension %d." % data_dim
)
@@ -1366,12 +1381,14 @@ def add_dim_coord(self, dim_coord, data_dim):
def _add_unique_dim_coord(self, dim_coord, data_dim):
if isinstance(dim_coord, iris.coords.AuxCoord):
- raise ValueError("The dim_coord may not be an AuxCoord instance.")
+ raise iris.exceptions.CannotAddError(
+ "The dim_coord may not be an AuxCoord instance."
+ )
# Convert data_dim to a single integer
if isinstance(data_dim, Container):
if len(data_dim) != 1:
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
"The supplied data dimension must be a" " single number."
)
data_dim = int(list(data_dim)[0])
@@ -1380,7 +1397,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim):
# Check data_dim value is valid
if data_dim < 0 or data_dim >= self.ndim:
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
"The cube does not have the specified dimension "
"(%d)" % data_dim
)
@@ -1388,7 +1405,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim):
# Check compatibility with the shape of the data
if dim_coord.shape[0] != self.shape[data_dim]:
msg = "Unequal lengths. Cube dimension {} => {}; coord {!r} => {}."
- raise ValueError(
+ raise iris.exceptions.CannotAddError(
msg.format(
data_dim,
self.shape[data_dim],
@@ -1984,6 +2001,12 @@ def coord(
if name_or_coord is not None:
if not isinstance(name_or_coord, str):
_name = name_or_coord.name()
+ emsg = (
+ "Expected to find exactly 1 coordinate matching the given "
+ f"{_name!r} coordinate's metadata, but found none."
+ )
+ raise iris.exceptions.CoordinateNotFoundError(emsg)
+
bad_name = _name or standard_name or long_name or ""
emsg = (
f"Expected to find exactly 1 {bad_name!r} coordinate, "
@@ -2188,9 +2211,15 @@ def cell_measure(self, name_or_cell_measure=None):
bad_name = (
name_or_cell_measure and name_or_cell_measure.name()
) or ""
+ if name_or_cell_measure is not None:
+ emsg = (
+ "Expected to find exactly 1 cell measure matching the given "
+ f"{bad_name!r} cell measure's metadata, but found none."
+ )
+ raise iris.exceptions.CellMeasureNotFoundError(emsg)
msg = (
- "Expected to find exactly 1 %s cell_measure, but found "
- "none." % bad_name
+ f"Expected to find exactly 1 {bad_name!r} cell measure, "
+ "but found none."
)
raise iris.exceptions.CellMeasureNotFoundError(msg)
@@ -2275,9 +2304,16 @@ def ancillary_variable(self, name_or_ancillary_variable=None):
name_or_ancillary_variable
and name_or_ancillary_variable.name()
) or ""
+ if name_or_ancillary_variable is not None:
+ emsg = (
+ "Expected to find exactly 1 ancillary_variable matching the "
+ f"given {bad_name!r} ancillary_variable's metadata, but found "
+ "none."
+ )
+ raise iris.exceptions.AncillaryVariableNotFoundError(emsg)
msg = (
- "Expected to find exactly 1 {!s} ancillary_variable, but "
- "found none.".format(bad_name)
+ f"Expected to find exactly 1 {bad_name!r} ancillary_variable, "
+ "but found none."
)
raise iris.exceptions.AncillaryVariableNotFoundError(msg)
@@ -2293,10 +2329,23 @@ def cell_methods(self):
return self._metadata_manager.cell_methods
@cell_methods.setter
- def cell_methods(self, cell_methods):
- self._metadata_manager.cell_methods = (
- tuple(cell_methods) if cell_methods else tuple()
- )
+ def cell_methods(self, cell_methods: Iterable):
+ if not cell_methods:
+ # For backwards compatibility: Empty or null value is equivalent to ().
+ cell_methods = ()
+ else:
+ # Can supply any iterable, which is converted (copied) to a tuple.
+ cell_methods = tuple(cell_methods)
+ for cell_method in cell_methods:
+ # All contents should be CellMethods. Requiring class membership is
+ # somewhat non-Pythonic, but simple, and not a problem for now.
+ if not isinstance(cell_method, iris.coords.CellMethod):
+ msg = (
+ f"Cube.cell_methods assigned value includes {cell_method}, "
+ "which is not an iris.coords.CellMethod."
+ )
+ raise ValueError(msg)
+ self._metadata_manager.cell_methods = cell_methods
def core_data(self):
"""
@@ -2639,7 +2688,6 @@ def subset(self, coord):
coord_to_extract in self.aux_coords
and len(coord_to_extract.points) == 1
):
-
# Default to returning None
result = None
@@ -4038,8 +4086,9 @@ def aggregated_by(
# coordinate dimension.
shared_coords = list(
filter(
- lambda coord_: coord_ not in groupby_coords,
- self.coords(contains_dimension=dimension_to_groupby),
+ lambda coord_: coord_ not in groupby_coords
+ and dimension_to_groupby in self.coord_dims(coord_),
+ self.dim_coords + self.aux_coords,
)
)
@@ -4071,6 +4120,11 @@ def aggregated_by(
for coord in groupby_coords + shared_coords:
aggregateby_cube.remove_coord(coord)
+ coord_mapping = {}
+ for coord in aggregateby_cube.coords():
+ orig_id = id(self.coord(coord))
+ coord_mapping[orig_id] = coord
+
# Determine the group-by cube data shape.
data_shape = list(self.shape + aggregator.aggregate_shape(**kwargs))
data_shape[dimension_to_groupby] = len(groupby)
@@ -4199,6 +4253,11 @@ def aggregated_by(
aggregateby_cube.add_aux_coord(
new_coord, self.coord_dims(lookup_coord)
)
+ coord_mapping[id(self.coord(lookup_coord))] = new_coord
+
+ aggregateby_cube._aux_factories = []
+ for factory in self.aux_factories:
+ aggregateby_cube.add_aux_factory(factory.updated(coord_mapping))
# Attach the aggregate-by data into the aggregate-by cube.
if aggregateby_weights is None:
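
With the stricter cell_methods setter above, any iterable can be assigned but its contents must all be iris.coords.CellMethod instances; anything else now raises a ValueError. A small sketch, assuming a version of Iris that includes this change:

    import numpy as np
    import iris.coords
    import iris.cube

    cube = iris.cube.Cube(np.zeros((2, 3)), long_name="example")

    # An iterable of CellMethod objects is accepted and copied to a tuple.
    cube.cell_methods = [iris.coords.CellMethod("mean", coords="time")]
    print(cube.cell_methods)

    # Anything that is not a CellMethod is rejected.
    try:
        cube.cell_methods = ["mean: time"]
    except ValueError as err:
        print(err)
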
diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py
index 12d24ef70f..5d3da3349e 100644
--- a/lib/iris/exceptions.py
+++ b/lib/iris/exceptions.py
@@ -174,3 +174,9 @@ class UnitConversionError(IrisError):
"""Raised when Iris is unable to convert a unit."""
pass
+
+
+class CannotAddError(ValueError):
+ """Raised when an object (e.g. coord) cannot be added to a :class:`~iris.cube.Cube`."""
+
+ pass
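
Because CannotAddError subclasses ValueError, existing "except ValueError" handlers keep working while new code can catch the more specific class. A minimal sketch of triggering it with a length-mismatched coordinate, assuming a version of Iris that includes this change:

    import numpy as np
    import iris.coords
    import iris.cube
    import iris.exceptions

    cube = iris.cube.Cube(np.zeros(3), long_name="example")
    bad = iris.coords.DimCoord([0, 1], long_name="x")  # length 2 vs cube length 3

    try:
        cube.add_dim_coord(bad, 0)
    except iris.exceptions.CannotAddError as err:  # would also match ValueError
        print(err)
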
diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py
index fb2e2af590..1b6c2d46be 100644
--- a/lib/iris/experimental/animate.py
+++ b/lib/iris/experimental/animate.py
@@ -6,118 +6,32 @@
"""
Wrapper for animating iris cubes using iris or matplotlib plotting functions
-"""
-
-import warnings
+Notes
+-----
+.. deprecated:: 3.4.0
-import matplotlib.animation as animation
-import matplotlib.pyplot as plt
+``iris.experimental.animate.animate()`` has been moved to
+:func:`iris.plot.animate`. This module will therefore be removed in a future
+release.
-import iris
+"""
def animate(cube_iterator, plot_func, fig=None, **kwargs):
"""
Animates the given cube iterator.
- Args:
-
- * cube_iterator (iterable of :class:`iris.cube.Cube` objects):
- Each animation frame corresponds to each :class:`iris.cube.Cube`
- object. See :meth:`iris.cube.Cube.slices`.
-
- * plot_func (:mod:`iris.plot` or :mod:`iris.quickplot` plotting function):
- Plotting function used to animate. Must accept the signature
- ``plot_func(cube, vmin=vmin, vmax=vmax, coords=coords)``.
- :func:`~iris.plot.contourf`, :func:`~iris.plot.contour`,
- :func:`~iris.plot.pcolor` and :func:`~iris.plot.pcolormesh`
- all conform to this signature.
-
- Kwargs:
-
- * fig (:class:`matplotlib.figure.Figure` instance):
- By default, the current figure will be used or a new figure instance
- created if no figure is available. See :func:`matplotlib.pyplot.gcf`.
-
- * coords (list of :class:`~iris.coords.Coord` objects or coordinate names):
- Use the given coordinates as the axes for the plot. The order of the
- given coordinates indicates which axis to use for each, where the first
- element is the horizontal axis of the plot and the second element is
- the vertical axis of the plot.
-
- * interval (int, float or long):
- Defines the time interval in milliseconds between successive frames.
- A default interval of 100ms is set.
-
- * vmin, vmax (int, float or long):
- Color scaling values, see :class:`matplotlib.colors.Normalize` for
- further details. Default values are determined by the min-max across
- the data set over the entire sequence.
-
- See :class:`matplotlib.animation.FuncAnimation` for details of other valid
- keyword arguments.
+ Warnings
+ --------
+ This function is now **disabled**.
- Returns:
- :class:`~matplotlib.animation.FuncAnimation` object suitable for
- saving and or plotting.
-
- For example, to animate along a set of cube slices::
-
- cube_iter = cubes.slices(('grid_longitude', 'grid_latitude'))
- ani = animate(cube_iter, qplt.contourf)
- plt.show()
+ The functionality has been moved to :func:`iris.plot.animate`.
"""
- kwargs.setdefault("interval", 100)
- coords = kwargs.pop("coords", None)
-
- if fig is None:
- fig = plt.gcf()
-
- def update_animation_iris(i, cubes, vmin, vmax, coords):
- # Clearing the figure is currently necessary for compatibility with
- # the iris quickploting module - due to the colorbar.
- plt.gcf().clf()
- plot_func(cubes[i], vmin=vmin, vmax=vmax, coords=coords)
-
- # Turn cube iterator into a list to determine plot ranges.
- # NOTE: we check that we are not providing a cube as this has a deprecated
- # iter special method.
- if hasattr(cube_iterator, "__iter__") and not isinstance(
- cube_iterator, iris.cube.Cube
- ):
- cubes = iris.cube.CubeList(cube_iterator)
- else:
- msg = "iterable type object required for animation, {} given".format(
- type(cube_iterator)
- )
- raise TypeError(msg)
-
- supported = ["iris.plot", "iris.quickplot"]
- if plot_func.__module__ not in supported:
- msg = (
- 'Given plotting module "{}" may not be supported, intended '
- "use: {}."
- )
- msg = msg.format(plot_func.__module__, supported)
- warnings.warn(msg, UserWarning)
-
- supported = ["contour", "contourf", "pcolor", "pcolormesh"]
- if plot_func.__name__ not in supported:
- msg = (
- 'Given plotting function "{}" may not be supported, intended '
- "use: {}."
- )
- msg = msg.format(plot_func.__name__, supported)
- warnings.warn(msg, UserWarning)
-
- # Determine plot range.
- vmin = kwargs.pop("vmin", min([cc.data.min() for cc in cubes]))
- vmax = kwargs.pop("vmax", max([cc.data.max() for cc in cubes]))
-
- update = update_animation_iris
- frames = range(len(cubes))
-
- return animation.FuncAnimation(
- fig, update, frames=frames, fargs=(cubes, vmin, vmax, coords), **kwargs
+ msg = (
+ "The function 'iris.experimental.animate.animate()' has been moved, "
+ "and is now at 'iris.plot.animate()'.\n"
+ "Please replace 'iris.experimental.animate.animate' with "
+ "'iris.plot.animate'."
)
+ raise Exception(msg)
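
For existing callers, the migration keeps the same (cube_iterator, plot_func) call pattern. A sketch of the replacement, assuming `cube` is an already-loaded cube with grid_longitude/grid_latitude coordinates and that matplotlib is available:

    import matplotlib.pyplot as plt
    import iris.plot as iplt
    import iris.quickplot as qplt

    # Before: from iris.experimental.animate import animate
    # After:  iris.plot.animate, with the same signature.
    cube_iter = cube.slices(("grid_longitude", "grid_latitude"))
    ani = iplt.animate(cube_iter, qplt.contourf, interval=100)
    plt.show()
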
diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py
index bfa048ddf0..fdc23c7bc4 100644
--- a/lib/iris/experimental/regrid_conservative.py
+++ b/lib/iris/experimental/regrid_conservative.py
@@ -17,13 +17,15 @@
"""
+import functools
+
import cartopy.crs as ccrs
import numpy as np
import iris
from iris._deprecation import warn_deprecated
from iris.analysis._interpolation import get_xy_dim_coords
-from iris.analysis._regrid import RectilinearRegridder
+from iris.analysis._regrid import RectilinearRegridder, _create_cube
from iris.util import _meshgrid
wmsg = (
@@ -329,16 +331,23 @@ def _valid_units(coord):
# Return result as a new cube based on the source.
# TODO: please tidy this interface !!!
- return RectilinearRegridder._create_cube(
- fullcube_data,
- src=source_cube,
- x_dim=src_dims_xy[0],
- y_dim=src_dims_xy[1],
+ _regrid_callback = functools.partial(
+ RectilinearRegridder._regrid,
src_x_coord=src_coords[0],
src_y_coord=src_coords[1],
- grid_x_coord=dst_coords[0],
- grid_y_coord=dst_coords[1],
sample_grid_x=sample_grid_x,
sample_grid_y=sample_grid_y,
- regrid_callback=RectilinearRegridder._regrid,
+ )
+
+ def regrid_callback(*args, **kwargs):
+ _data, dims = args
+ return _regrid_callback(_data, *dims, **kwargs)
+
+ return _create_cube(
+ fullcube_data,
+ source_cube,
+ [src_dims_xy[0], src_dims_xy[1]],
+ [dst_coords[0], dst_coords[1]],
+ 2,
+ regrid_callback,
)
diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py
index 974a563046..0d566da73f 100644
--- a/lib/iris/experimental/ugrid/mesh.py
+++ b/lib/iris/experimental/ugrid/mesh.py
@@ -131,7 +131,7 @@ def __init__(
Args:
- * indices (numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array):
+ * indices (:class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array`):
2D array giving the topological connection relationship between
:attr:`location` elements and :attr:`connected` elements.
The :attr:`location_axis` dimension indexes over the
@@ -501,7 +501,7 @@ def core_indices(self):
NumPy array or a Dask array.
Returns:
- numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array
+ :class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array`
"""
return super()._core_values()
@@ -2841,16 +2841,60 @@ def __init__(
# Get the 'coord identity' metadata from the relevant node-coordinate.
node_coord = self.mesh.coord(include_nodes=True, axis=self.axis)
+ node_metadict = node_coord.metadata._asdict()
+ # Use node metadata, unless location is face/edge.
+ use_metadict = node_metadict.copy()
+ if location != "node":
+ # Location is either "edge" or "face" - get the relevant coord.
+ kwargs = {f"include_{location}s": True, "axis": axis}
+ location_coord = self.mesh.coord(**kwargs)
+
+ # Take the MeshCoord metadata from the 'location' coord.
+ use_metadict = location_coord.metadata._asdict()
+ unit_unknown = Unit(None)
+
+ # N.B. at present, coords in a Mesh are stored+accessed by 'axis', which
+ # means they must have a standard_name. So ...
+ # (a) the 'location' (face/edge) coord *always* has a useable phenomenon
+ # identity.
+ # (b) we still want to check that location+node coords have the same
+ # phenomenon (i.e. physical meaning identity + units), **but** ...
+ # (c) we will accept/ignore some differences : not just "var_name", but
+ # also "long_name" *and* "attributes". So it is *only* "standard_name"
+ # and "units" that cause an error if they differ.
+ for key in ("standard_name", "units"):
+ bounds_value = use_metadict[key]
+ nodes_value = node_metadict[key]
+ if key == "units" and (
+ bounds_value == unit_unknown or nodes_value == unit_unknown
+ ):
+ # Allow "any" unit to match no-units (for now)
+ continue
+ if bounds_value != nodes_value:
+
+ def fix_repr(val):
+ # Tidy values appearance by converting Unit to string, and
+ # wrapping strings in '', but leaving other types as a
+ # plain str() representation.
+ if isinstance(val, Unit):
+ val = str(val)
+ if isinstance(val, str):
+ val = repr(val)
+ return val
+
+ nodes_value, bounds_value = [
+ fix_repr(val) for val in (nodes_value, bounds_value)
+ ]
+ msg = (
+ f"Node coordinate {node_coord!r} disagrees with the "
+ f"{location} coordinate {location_coord!r}, "
+ f'in having a "{key}" value of {nodes_value} '
+ f"instead of {bounds_value}."
+ )
+ raise ValueError(msg)
+
# Call parent constructor to handle the common constructor args.
- super().__init__(
- points,
- bounds=bounds,
- standard_name=node_coord.standard_name,
- long_name=node_coord.long_name,
- var_name=None, # We *don't* "represent" the underlying node var
- units=node_coord.units,
- attributes=node_coord.attributes,
- )
+ super().__init__(points, bounds=bounds, **use_metadict)
# Define accessors for MeshCoord-specific properties mesh/location/axis.
# These are all read-only.
@@ -3083,9 +3127,7 @@ def _construct_access_arrays(self):
flat_inds_safe = al.where(missing_inds, 0, flat_inds_nomask)
# Here's the core indexing operation.
# The comma applies all inds-array values to the *first* dimension.
- bounds = node_points[
- flat_inds_safe,
- ]
+ bounds = node_points[flat_inds_safe,]
# Fix 'missing' locations, and restore the proper shape.
bounds = al.ma.masked_array(bounds, missing_inds)
bounds = bounds.reshape(indices.shape)
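
The MeshCoord change above builds its constructor keywords from a metadata namedtuple via _asdict(). The same pattern in miniature, using a plain namedtuple and a stand-in constructor rather than Iris metadata classes:

    from collections import namedtuple

    Metadata = namedtuple(
        "Metadata",
        ["standard_name", "long_name", "var_name", "units", "attributes"],
    )

    def make_coord(points, **metadata):
        # Stand-in for a coord constructor that accepts metadata keywords.
        return {"points": points, **metadata}

    node_meta = Metadata("latitude", None, None, "degrees", {})
    use_metadict = node_meta._asdict()  # an ordinary dict, safe to tweak
    use_metadict["var_name"] = None     # e.g. drop the variable name

    coord = make_coord([0.0, 1.0], **use_metadict)
    print(coord["standard_name"], coord["units"])  # latitude degrees
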
diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py
index ae0b787908..44bbe04fe9 100644
--- a/lib/iris/experimental/ugrid/metadata.py
+++ b/lib/iris/experimental/ugrid/metadata.py
@@ -53,6 +53,7 @@ def _combine_lenient(self, other):
A list of combined metadata member values.
"""
+
# Perform "strict" combination for "cf_role", "start_index", "location_axis".
def func(field):
left = getattr(self, field)
@@ -113,6 +114,7 @@ def _difference_lenient(self, other):
A list of difference metadata member values.
"""
+
# Perform "strict" difference for "cf_role", "start_index", "location_axis".
def func(field):
left = getattr(self, field)
@@ -233,6 +235,7 @@ def _difference_lenient(self, other):
A list of difference metadata member values.
"""
+
# Perform "strict" difference for "topology_dimension",
# "node_dimension", "edge_dimension" and "face_dimension".
def func(field):
@@ -297,6 +300,7 @@ def _combine_lenient(self, other):
A list of combined metadata member values.
"""
+
# It is actually "strict" : return None except where members are equal.
def func(field):
left = getattr(self, field)
@@ -352,6 +356,7 @@ def _difference_lenient(self, other):
A list of different metadata member values.
"""
+
# Perform "strict" difference for location / axis.
def func(field):
left = getattr(self, field)
diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py
index c075a659ac..35163c47d5 100644
--- a/lib/iris/fileformats/_nc_load_rules/helpers.py
+++ b/lib/iris/fileformats/_nc_load_rules/helpers.py
@@ -13,7 +13,6 @@
build routines, and which it does not use.
"""
-
import warnings
import cf_units
@@ -37,6 +36,8 @@
import iris.std_names
import iris.util
+# TODO: should un-addable coords / cell measures / etcetera be skipped? iris#5068.
+
#
# UD Units Constants (based on Unidata udunits.dat definition file)
#
@@ -853,6 +854,12 @@ def build_dimension_coordinate(
cf_coord_var, coord_name, attributes
)
+ coord_skipped_msg = (
+ f"{cf_coord_var.cf_name} coordinate not added to Cube: "
+ )
+ coord_skipped_msg += "{error}"
+ coord_skipped = False
+
# Create the coordinate.
try:
coord = iris.coords.DimCoord(
@@ -869,6 +876,11 @@ def build_dimension_coordinate(
)
except ValueError as e_msg:
# Attempt graceful loading.
+ msg = (
+ "Failed to create {name!r} dimension coordinate: {error}\n"
+ "Gracefully creating {name!r} auxiliary coordinate instead."
+ )
+ warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg))
coord = iris.coords.AuxCoord(
points_data,
standard_name=standard_name,
@@ -880,22 +892,26 @@ def build_dimension_coordinate(
coord_system=coord_system,
climatological=climatological,
)
- cube.add_aux_coord(coord, data_dims)
- msg = (
- "Failed to create {name!r} dimension coordinate: {error}\n"
- "Gracefully creating {name!r} auxiliary coordinate instead."
- )
- warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg))
+ try:
+ cube.add_aux_coord(coord, data_dims)
+ except iris.exceptions.CannotAddError as e_msg:
+ warnings.warn(coord_skipped_msg.format(error=e_msg))
+ coord_skipped = True
else:
# Add the dimension coordinate to the cube.
- if data_dims:
- cube.add_dim_coord(coord, data_dims)
- else:
- # Scalar coords are placed in the aux_coords container.
- cube.add_aux_coord(coord, data_dims)
+ try:
+ if data_dims:
+ cube.add_dim_coord(coord, data_dims)
+ else:
+ # Scalar coords are placed in the aux_coords container.
+ cube.add_aux_coord(coord, data_dims)
+ except iris.exceptions.CannotAddError as e_msg:
+ warnings.warn(coord_skipped_msg.format(error=e_msg))
+ coord_skipped = True
- # Update the coordinate to CF-netCDF variable mapping.
- engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name))
+ if not coord_skipped:
+ # Update the coordinate to CF-netCDF variable mapping.
+ engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name))
################################################################################
@@ -964,10 +980,14 @@ def build_auxiliary_coordinate(
)
# Add it to the cube
- cube.add_aux_coord(coord, data_dims)
-
- # Make a list with names, stored on the engine, so we can find them all later.
- engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name))
+ try:
+ cube.add_aux_coord(coord, data_dims)
+ except iris.exceptions.CannotAddError as e_msg:
+ msg = "{name!r} coordinate not added to Cube: {error}"
+ warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg))
+ else:
+ # Make a list with names, stored on the engine, so we can find them all later.
+ engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name))
################################################################################
@@ -1011,12 +1031,16 @@ def build_cell_measures(engine, cf_cm_var):
)
# Add it to the cube
- cube.add_cell_measure(cell_measure, data_dims)
-
- # Make a list with names, stored on the engine, so we can find them all later.
- engine.cube_parts["cell_measures"].append(
- (cell_measure, cf_cm_var.cf_name)
- )
+ try:
+ cube.add_cell_measure(cell_measure, data_dims)
+ except iris.exceptions.CannotAddError as e_msg:
+ msg = "{name!r} cell measure not added to Cube: {error}"
+ warnings.warn(msg.format(name=str(cf_cm_var.cf_name), error=e_msg))
+ else:
+ # Make a list with names, stored on the engine, so we can find them all later.
+ engine.cube_parts["cell_measures"].append(
+ (cell_measure, cf_cm_var.cf_name)
+ )
################################################################################
@@ -1056,10 +1080,16 @@ def build_ancil_var(engine, cf_av_var):
)
# Add it to the cube
- cube.add_ancillary_variable(av, data_dims)
-
- # Make a list with names, stored on the engine, so we can find them all later.
- engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name))
+ try:
+ cube.add_ancillary_variable(av, data_dims)
+ except iris.exceptions.CannotAddError as e_msg:
+ msg = "{name!r} ancillary variable not added to Cube: {error}"
+ warnings.warn(msg.format(name=str(cf_av_var.cf_name), error=e_msg))
+ else:
+ # Make a list with names, stored on the engine, so we can find them all later.
+ engine.cube_parts["ancillary_variables"].append(
+ (av, cf_av_var.cf_name)
+ )
################################################################################
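
The loading helpers above all switch to the same try/except/else shape: attempt the add, warn and skip on CannotAddError, and only record the name-to-variable mapping when the add succeeded. A generic sketch of that control flow, with a stand-in exception and add function:

    import warnings

    class CannotAddError(ValueError):
        """Stand-in for iris.exceptions.CannotAddError."""

    def add_to_cube(name):
        if name == "bad":
            raise CannotAddError("duplicate coordinates are not permitted")

    recorded = []
    for name in ["good", "bad"]:
        try:
            add_to_cube(name)
        except CannotAddError as err:
            warnings.warn(f"{name!r} coordinate not added to Cube: {err}")
        else:
            # Only reached when no exception was raised.
            recorded.append(name)

    print(recorded)  # ['good']
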
diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py
index 5c70c5acf2..4dcd5ce6aa 100644
--- a/lib/iris/fileformats/abf.py
+++ b/lib/iris/fileformats/abf.py
@@ -219,7 +219,6 @@ def load_cubes(filespecs, callback=None):
for filespec in filespecs:
for filename in glob.glob(filespec):
-
field = ABFField(filename)
cube = field.to_cube()
diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py
index 3aaba3679e..b9b64a343e 100644
--- a/lib/iris/fileformats/name_loaders.py
+++ b/lib/iris/fileformats/name_loaders.py
@@ -571,7 +571,9 @@ def _generate_cubes(
cube.attributes[key] = value
if cell_methods is not None:
- cube.add_cell_method(cell_methods[i])
+ cell_method = cell_methods[i]
+ if cell_method is not None:
+ cube.add_cell_method(cell_method)
yield cube
@@ -610,7 +612,7 @@ def _build_cell_methods(av_or_ints, coord):
cell_method = None
msg = "Unknown {} statistic: {!r}. Unable to create cell method."
warnings.warn(msg.format(coord, av_or_int))
- cell_methods.append(cell_method)
+ cell_methods.append(cell_method) # NOTE: this can be None
return cell_methods
@@ -992,7 +994,6 @@ def load_NAMEIII_version2(filename):
# using the next() method. This will come in handy as we wish to
# progress through the file line by line.
with open(filename, "r") as file_handle:
-
# define a dictionary to hold the header metadata about this file
header = read_header(file_handle)
@@ -1003,7 +1004,6 @@ def load_NAMEIII_version2(filename):
column_headings = {}
datacol1 = header["Number of preliminary cols"]
for line in file_handle:
-
data = [col.strip() for col in line.split(",")][:-1]
# If first column is not zero we have reached the end
diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py
index 1fb7d4e178..cff088cf89 100644
--- a/lib/iris/fileformats/pp.py
+++ b/lib/iris/fileformats/pp.py
@@ -625,7 +625,7 @@ def __getstate__(self):
def __setstate__(self, state):
# Because we have __slots__, this is needed to support Pickle.load()
# (Use setattr, as there is no object dictionary.)
- for (key, value) in state:
+ for key, value in state:
setattr(self, key, value)
def __eq__(self, other):
@@ -767,6 +767,18 @@ def _data_bytes_to_shaped_array(
else:
# Reform in row-column order
+ actual_length = np.prod(data.shape)
+ if (expected_length := np.prod(data_shape)) != actual_length:
+ if (expected_length < actual_length) and (data.ndim == 1):
+ # known use case where mule adds padding to data payload
+ # for a collapsed field.
+ data = data[:expected_length]
+ else:
+ emsg = (
+ f"PP field data containing {actual_length} words does not "
+ f"match expected length of {expected_length} words."
+ )
+ raise ValueError(emsg)
data.shape = data_shape
# Mask the array
@@ -2017,10 +2029,8 @@ def pp_filter(field):
res = True
if field.stash not in _STASH_ALLOW:
if pp_constraints.get("stash"):
-
res = False
for call_func in pp_constraints["stash"]:
-
if call_func(str(field.stash)):
res = True
break
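
The PP payload check above compares the decoded word count against the shape in the header, and only tolerates extra trailing words on a flat (1-D) payload. A stand-alone NumPy sketch of the same logic, with made-up sizes:

    import numpy as np

    data = np.arange(12)   # decoded payload: 12 words
    data_shape = (1, 10)   # the header says the field is 1 x 10

    actual_length = np.prod(data.shape)
    if (expected_length := np.prod(data_shape)) != actual_length:
        if expected_length < actual_length and data.ndim == 1:
            # Tolerate trailing padding words on a flat payload.
            data = data[:expected_length]
        else:
            raise ValueError(
                f"PP field data containing {actual_length} words does not "
                f"match expected length of {expected_length} words."
            )

    data.shape = data_shape
    print(data.shape)  # (1, 10)
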
diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py
index ebccec47ee..11d03e978a 100644
--- a/lib/iris/fileformats/pp_load_rules.py
+++ b/lib/iris/fileformats/pp_load_rules.py
@@ -756,7 +756,6 @@ def date2year(t_in):
)
)
):
-
coords_and_dims.append(
_new_coord_and_dims(
do_vector,
diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py
index e6b3748f9b..0369fc9fd0 100644
--- a/lib/iris/fileformats/pp_save_rules.py
+++ b/lib/iris/fileformats/pp_save_rules.py
@@ -422,7 +422,16 @@ def _grid_and_pole_rules(cube, pp):
lat_coord = vector_coord(cube, "latitude")
grid_lat_coord = vector_coord(cube, "grid_latitude")
- if lon_coord and not is_regular(lon_coord):
+ scalar_lon_coord = scalar_coord(cube, "longitude")
+
+ if lon_coord is None and grid_lon_coord is None and scalar_lon_coord:
+ # default value of 360.0 degrees to specify a circular wrap of
+ # the collapsed scalar longitude coordinate, based on examples
+ # of model output for several different diagnostics
+ pp.bdx = (unit := scalar_lon_coord.units) and unit.modulus or 360.0
+ pp.bzx = scalar_lon_coord.points[0] - pp.bdx
+ pp.lbnpt = scalar_lon_coord.shape[0]
+ elif lon_coord and not is_regular(lon_coord):
pp.bzx = 0
pp.bdx = 0
pp.lbnpt = lon_coord.shape[0]
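The compact `(unit := ...) and unit.modulus or 360.0` expression above resolves to the longitude unit's circular modulus when one exists, otherwise to a full 360-degree wrap. A minimal sketch of that fallback rule in isolation (the helper name is illustrative):

```python
import cf_units

def default_bdx(units):
    # Prefer the unit's circular modulus (e.g. 360 for degrees); otherwise
    # fall back to a full 360-degree wrap, as in the rule above.
    modulus = units.modulus if units is not None else None
    return modulus if modulus is not None else 360.0

print(default_bdx(cf_units.Unit("degrees")))  # 360.0 -- the unit's own modulus
print(default_bdx(cf_units.Unit("m")))        # 360.0 -- no modulus, fallback value
```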
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py
index 07ed5eb8ce..51940b7c4d 100644
--- a/lib/iris/fileformats/rules.py
+++ b/lib/iris/fileformats/rules.py
@@ -394,7 +394,7 @@ def _load_pairs_from_fields_and_filenames(
yield (cube, field)
regrid_cache = {}
- for (cube, factories, field) in results_needing_reference:
+ for cube, factories, field in results_needing_reference:
_resolve_factory_references(
cube, factories, concrete_reference_targets, regrid_cache
)
diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py
index d193aa30ce..64b7f8e891 100644
--- a/lib/iris/fileformats/um/_fast_load_structured_fields.py
+++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py
@@ -133,6 +133,7 @@ def element_arrays_and_dims(self):
def _field_vector_element_arrays(self):
"""Define the field components used in the structure analysis."""
+
# Define functions to make t1 and t2 values as date-time tuples.
# These depend on header version (PPField2 has no seconds values).
def t1_fn(fld):
diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py
index 8aee67ae3e..01539960a5 100644
--- a/lib/iris/fileformats/um_cf_map.py
+++ b/lib/iris/fileformats/um_cf_map.py
@@ -16,10 +16,12 @@
LBFC_TO_CF = {
5: CFName('atmosphere_boundary_layer_thickness', None, 'm'),
16: CFName('air_temperature', None, 'K'),
+ 22: CFName('wet_bulb_potential_temperature', None, 'K'),
23: CFName('soil_temperature', None, 'K'),
27: CFName('air_density', None, 'kg m-3'),
36: CFName('land_area_fraction', None, '1'),
37: CFName('sea_ice_area_fraction', None, '1'),
+ 42: CFName('upward_air_velocity', None, 'm s-1'),
50: CFName('wind_speed', None, 'm s-1'),
56: CFName('x_wind', None, 'm s-1'),
57: CFName('y_wind', None, 'm s-1'),
@@ -28,11 +30,16 @@
83: CFName('potential_vorticity_of_atmosphere_layer', None, 'Pa-1 s-1'),
94: CFName('convective_rainfall_amount', None, 'kg m-2'),
97: CFName('rainfall_flux', None, 'kg m-2 s-1'),
+ 98: CFName('convective_rainfall_flux', None, 'kg m-2 s-1'),
+ 99: CFName('stratiform_rainfall_flux', None, 'kg m-2 s-1'),
102: CFName('stratiform_rainfall_amount', None, 'kg m-2'),
+ 106: CFName('soil_moisture_content', None, 'kg m-2'),
108: CFName('snowfall_flux', None, 'kg m-2 s-1'),
111: CFName('surface_runoff_amount', None, 'kg m-2'),
116: CFName('stratiform_snowfall_amount', None, 'kg m-2'),
117: CFName('convective_snowfall_amount', None, 'kg m-2'),
+ 118: CFName('stratiform_snowfall_flux', None, 'kg m-2 s-1'),
+ 119: CFName('convective_snowfall_flux', None, 'kg m-2 s-1'),
122: CFName('moisture_content_of_soil_layer', None, 'kg m-2'),
183: CFName('wind_speed', None, 'm s-1'),
200: CFName('toa_incoming_shortwave_flux', None, 'W m-2'),
@@ -899,7 +906,7 @@
'm01s30i301': CFName(None, 'Heavyside function on pressure levels', '1'),
'm01s30i302': CFName('virtual_temperature', None, 'K'),
'm01s30i310': CFName('northward_transformed_eulerian_mean_air_velocity', None, 'm s-1'),
- 'm01s30i311': CFName('northward_transformed_eulerian_mean_air_velocity', None, 'm s-1'),
+ 'm01s30i311': CFName('upward_transformed_eulerian_mean_air_velocity', None, 'm s-1'),
'm01s30i312': CFName('northward_eliassen_palm_flux_in_air', None, 'kg s-2'),
'm01s30i313': CFName('upward_eliassen_palm_flux_in_air', None, 'kg s-2'),
'm01s30i314': CFName('tendency_of_eastward_wind_due_to_eliassen_palm_flux_divergence', None, 'm s-2'),
@@ -1157,7 +1164,9 @@
CFName('cloud_area_fraction_in_atmosphere_layer', None, '1'): 1720,
CFName('convective_cloud_area_fraction', None, '1'): 34,
CFName('convective_rainfall_amount', None, 'kg m-2'): 94,
+ CFName('convective_rainfall_flux', None, 'kg m-2 s-1'): 98,
CFName('convective_snowfall_amount', None, 'kg m-2'): 117,
+ CFName('convective_snowfall_flux', None, 'kg m-2 s-1'): 119,
CFName('dimensionless_exner_function', None, '1'): 7,
CFName('divergence_of_wind', None, 's-1'): 74,
CFName('downward_heat_flux_in_sea_ice', None, 'W m-2'): 261,
@@ -1203,6 +1212,7 @@
CFName('soil_albedo', None, '1'): 1395,
CFName('soil_carbon_content', None, 'kg m-2'): 1397,
CFName('soil_hydraulic_conductivity_at_saturation', None, 'm s-1'): 333,
+ CFName('soil_moisture_content', None, 'kg m-2'): 106,
CFName('soil_moisture_content_at_field_capacity', None, 'kg m-2'): 1559,
CFName('soil_porosity', None, '1'): 332,
CFName('soil_suction_at_saturation', None, 'Pa'): 342,
@@ -1212,8 +1222,10 @@
CFName('specific_kinetic_energy_of_air', None, 'm2 s-2'): 60,
CFName('stratiform_cloud_area_fraction_in_atmosphere_layer', None, '1'): 220,
CFName('stratiform_rainfall_amount', None, 'kg m-2'): 102,
+ CFName('stratiform_rainfall_flux', None, 'kg m-2 s-1'): 99,
CFName('stratiform_rainfall_rate', None, 'kg m-2 s-1'): 99,
CFName('stratiform_snowfall_amount', None, 'kg m-2'): 116,
+ CFName('stratiform_snowfall_flux', None, 'kg m-2 s-1'): 118,
CFName('subsurface_runoff_amount', None, 'kg m-2'): 112,
CFName('subsurface_runoff_flux', None, 'kg m-2 s-1'): 1533,
CFName('surface_albedo_assuming_deep_snow', None, '1'): 328,
@@ -1260,6 +1272,7 @@
CFName('volume_fraction_of_condensed_water_in_soil_at_critical_point', None, '1'): 330,
CFName('volume_fraction_of_condensed_water_in_soil_at_wilting_point', None, '1'): 329,
CFName('water_potential_evaporation_flux', None, 'kg m-2 s-1'): 115,
+ CFName('wet_bulb_potential_temperature', None, 'K'): 22,
CFName('wind_mixing_energy_flux_into_sea_water', None, 'W m-2'): 182,
CFName('wind_speed', None, 'm s-1'): 50,
CFName('x_wind', None, 'm s-1'): 56,
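The new `LBFC_TO_CF` entries above (field codes 22, 42, 98, 99, 106, 118 and 119) mean those UM field codes now translate to CF names on load. A quick lookup sketch against one of the new entries:

```python
from iris.fileformats.um_cf_map import LBFC_TO_CF

# LBFC 22 was added above as wet_bulb_potential_temperature.
standard_name, long_name, units = LBFC_TO_CF[22]
print(standard_name, units)  # wet_bulb_potential_temperature K
```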
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py
index 4659f70ae3..7dd08c723c 100644
--- a/lib/iris/io/__init__.py
+++ b/lib/iris/io/__init__.py
@@ -59,6 +59,11 @@ def run_callback(callback, cube, field, filename):
It is possible that this function returns None for certain callbacks,
the caller of this function should handle this case.
+ .. note::
+
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
from iris.cube import Cube
@@ -424,6 +429,11 @@ def save(source, target, saver=None, **kwargs):
>>> # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option
>>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC")
+ Notes
+ ------
+
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
from iris.cube import Cube, CubeList
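A minimal check of the laziness note added to `iris.save` above, assuming the `iris-sample-data` package and netCDF support are available; the cube's data stays lazy before and after saving:

```python
import iris

cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))
print(cube.has_lazy_data())  # True -- nothing realised on load

iris.save(cube, "air_temp.nc")
print(cube.has_lazy_data())  # still True -- saving streams the data lazily
```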
diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py
index edf448e95b..a8e333c566 100644
--- a/lib/iris/io/format_picker.py
+++ b/lib/iris/io/format_picker.py
@@ -134,8 +134,9 @@ def get_spec(self, basename, buffer_obj):
value = value[:50] + "..."
printable_values[key] = value
msg = (
- "No format specification could be found for the given buffer."
- " File element cache:\n {}".format(printable_values)
+ "No format specification could be found for the given buffer. "
+ "Perhaps a plugin is missing or has not been loaded. "
+ "File element cache:\n {}".format(printable_values)
)
raise ValueError(msg)
diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py
index 636635ee78..d6bac77d3b 100644
--- a/lib/iris/iterate.py
+++ b/lib/iris/iterate.py
@@ -58,6 +58,10 @@ def izip(*cubes, **kwargs):
... 'grid_longitude']):
... pass
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
if not cubes:
raise TypeError("Expected one or more cubes.")
diff --git a/lib/iris/palette.py b/lib/iris/palette.py
index 626ae4e341..a1c0a1e878 100644
--- a/lib/iris/palette.py
+++ b/lib/iris/palette.py
@@ -15,6 +15,7 @@
import re
import cf_units
+from matplotlib import colormaps as mpl_colormaps
import matplotlib.cm as mpl_cm
import matplotlib.colors as mpl_colors
import numpy as np
@@ -120,6 +121,11 @@ def cmap_norm(cube):
Tuple of :class:`matplotlib.colors.LinearSegmentedColormap` and
:class:`iris.palette.SymmetricNormalize`
+ Notes
+ ------
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
+
"""
args, kwargs = _default_cmap_norm((cube,), {})
return kwargs.get("cmap"), kwargs.get("norm")
@@ -337,7 +343,7 @@ def _load_palette():
)
# Register the color map for use.
- mpl_cm.register_cmap(cmap=cmap)
+ mpl_colormaps.register(cmap)
# Ensure to load the color map palettes.
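The change above replaces the deprecated `matplotlib.cm.register_cmap` with the `matplotlib.colormaps` registry available in newer Matplotlib releases. A minimal sketch of the new-style registration, using a hypothetical palette rather than the brewer palettes Iris actually loads:

```python
from matplotlib import colormaps
from matplotlib.colors import LinearSegmentedColormap

# Hypothetical two-colour palette; the real Iris palettes come from data files.
cmap = LinearSegmentedColormap.from_list("example_palette", ["white", "blue"])
colormaps.register(cmap)

# Once registered, the colormap can be looked up by name.
print(colormaps["example_palette"].name)  # example_palette
```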
diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py
index b00eb3f117..417b6b11de 100644
--- a/lib/iris/pandas.py
+++ b/lib/iris/pandas.py
@@ -9,7 +9,6 @@
See also: http://pandas.pydata.org/
"""
-
import datetime
from itertools import chain, combinations
import warnings
@@ -157,8 +156,8 @@ def as_cube(
Example usage::
- as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY})
- as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD})
+ as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY})
+ as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD})
"""
message = (
@@ -239,7 +238,7 @@ def as_cubes(
A :class:`~pandas.DataFrame` using columns as a second data dimension will
need to be 'melted' before conversion. See the Examples for how.
- Dask ``DataFrame``\\s are not supported.
+ :class:`dask.dataframe.DataFrame`\\ s are not supported.
Examples
--------
@@ -379,7 +378,10 @@ def as_cubes(
)
raise ValueError(message)
- if not pandas_index.is_monotonic:
+ if not (
+ pandas_index.is_monotonic_increasing
+ or pandas_index.is_monotonic_decreasing
+ ):
# Need monotonic index for use in DimCoord(s).
# This function doesn't sort_index itself since that breaks the
# option to return a data view instead of a copy.
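`Index.is_monotonic` is deprecated in recent pandas in favour of the explicit increasing/decreasing properties, which is why the check above accepts either direction. A standalone illustration:

```python
import pandas as pd

def is_monotonic(index: pd.Index) -> bool:
    # Equivalent to the replacement check above: monotonic in either direction.
    return index.is_monotonic_increasing or index.is_monotonic_decreasing

print(is_monotonic(pd.Index([1, 2, 3])))  # True
print(is_monotonic(pd.Index([3, 2, 1])))  # True (decreasing is acceptable)
print(is_monotonic(pd.Index([1, 3, 2])))  # False -- would hit the error path
```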
@@ -518,26 +520,92 @@ def _get_base(array):
raise AssertionError(msg)
+def _make_dim_coord_list(cube):
+ """Get Dimension coordinates."""
+ outlist = []
+ for dimn in range(cube.ndim):
+ dimn_coord = cube.coords(dimensions=dimn, dim_coords=True)
+ if dimn_coord:
+ outlist += [
+ [dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])]
+ ]
+ else:
+ outlist += [[f"dim{dimn}", range(cube.shape[dimn])]]
+ return list(zip(*outlist))
+
+
+def _make_aux_coord_list(cube):
+ """Get Auxiliary coordinates."""
+ outlist = []
+ for aux_coord in cube.coords(dim_coords=False):
+ outlist += [
+ [
+ aux_coord.name(),
+ cube.coord_dims(aux_coord),
+ _as_pandas_coord(aux_coord),
+ ]
+ ]
+ return list(chain.from_iterable([outlist]))
+
+
+def _make_ancillary_variables_list(cube):
+ """Get Ancillary variables."""
+ outlist = []
+ for ancil_var in cube.ancillary_variables():
+ outlist += [
+ [
+ ancil_var.name(),
+ cube.ancillary_variable_dims(ancil_var),
+ ancil_var.data,
+ ]
+ ]
+ return list(chain.from_iterable([outlist]))
+
+
+def _make_cell_measures_list(cube):
+ """Get cell measures."""
+ outlist = []
+ for cell_measure in cube.cell_measures():
+ outlist += [
+ [
+ cell_measure.name(),
+ cube.cell_measure_dims(cell_measure),
+ cell_measure.data,
+ ]
+ ]
+ return list(chain.from_iterable([outlist]))
+
+
def as_series(cube, copy=True):
"""
Convert a 1D cube to a Pandas Series.
- Args:
-
- * cube - The cube to convert to a Pandas Series.
-
- Kwargs:
-
- * copy - Whether to make a copy of the data.
- Defaults to True. Must be True for masked data.
+ .. deprecated:: 3.4.0
+ This function is scheduled for removal in a future release, being
+ replaced by :func:`iris.pandas.as_data_frame`, which offers improved
+ multi-dimensional handling.
- .. note::
+ Parameters
+ ----------
+ cube: :class:`Cube`
+ The cube to convert to a Pandas Series.
+ copy : bool, default=True
+ Whether to make a copy of the data.
+ Defaults to True. Must be True for masked data.
- This function will copy your data by default.
- If you have a large array that cannot be copied,
- make sure it is not masked and use copy=False.
+ Notes
+ -----
+ This function will copy your data by default.
+ If you have a large array that cannot be copied,
+ make sure it is not masked and use copy=False.
"""
+ message = (
+ "iris.pandas.as_series has been deprecated, and will be removed in a "
+ "future release. Please use iris.pandas.as_data_frame instead."
+ )
+ warn_deprecated(message)
+
data = cube.data
if ma.isMaskedArray(data):
if not copy:
@@ -545,61 +613,297 @@ def as_series(cube, copy=True):
data = data.astype("f").filled(np.nan)
elif copy:
data = data.copy()
-
index = None
if cube.dim_coords:
index = _as_pandas_coord(cube.dim_coords[0])
-
series = pandas.Series(data, index)
if not copy:
_assert_shared(data, series)
-
return series
-def as_data_frame(cube, copy=True):
+def as_data_frame(
+ cube,
+ copy=True,
+ add_aux_coords=False,
+ add_cell_measures=False,
+ add_ancillary_variables=False,
+):
"""
- Convert a 2D cube to a Pandas DataFrame.
+ Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`.
- Args:
+ :attr:`~iris.cube.Cube.dim_coords` and :attr:`~iris.cube.Cube.data` are
+ flattened into a long-style :class:`~pandas.DataFrame`. Other
+ :attr:`~iris.cube.Cube.aux_coords`, :attr:`~iris.cube.Cube.cell_measures`, :attr:`~iris.cube.Cube.ancillary_variables` and :attr:`~iris.cube.Cube.attributes`
+ may be optionally added as additional :class:`~pandas.DataFrame` columns.
- * cube - The cube to convert to a Pandas DataFrame.
+ Parameters
+ ----------
+ cube: :class:`~iris.cube.Cube`
+ The :class:`~iris.cube.Cube` to be converted to a :class:`pandas.DataFrame`.
+ copy : bool, default=True
+ Whether the :class:`pandas.DataFrame` is a copy of the Cube
+ :attr:`~iris.cube.Cube.data`. This option is provided to help with memory
+ size concerns.
+ add_aux_coords : bool, default=False
+ If True, add all :attr:`~iris.cube.Cube.aux_coords` (including scalar
+ coordinates) to the returned :class:`pandas.DataFrame`.
+ add_cell_measures : bool, default=False
+ If True, add :attr:`~iris.cube.Cube.cell_measures` to the returned
+ :class:`pandas.DataFrame`.
+ add_ancillary_variables: bool, default=False
+ If True, add :attr:`~iris.cube.Cube.ancillary_variables` to the returned
+ :class:`pandas.DataFrame`.
+
+ Returns
+ -------
+ :class:`~pandas.DataFrame`
+ A :class:`~pandas.DataFrame` with :class:`~iris.cube.Cube` dimensions
+ forming a :class:`~pandas.MultiIndex`
+
+ Warnings
+ --------
+ #. This documentation is for the new ``as_data_frame()`` behaviour, which
+ is **currently opt-in** to preserve backwards compatibility. The default
+ legacy behaviour is documented in pre-``v3.4`` documentation (summary:
+ limited to 2-dimensional :class:`~iris.cube.Cube`\\ s, with only the
+ :attr:`~iris.cube.Cube.data` and :attr:`~iris.cube.Cube.dim_coords`
+ being added). The legacy behaviour will be removed in a future version
+ of Iris, so please opt-in to the new behaviour at your earliest
+ convenience, via :class:`iris.Future`:
+
+ >>> iris.FUTURE.pandas_ndim = True
- Kwargs:
+ **Breaking change:** to enable the improvements, the new opt-in
+ behaviour flattens multi-dimensional data into a single
+ :class:`~pandas.DataFrame` column (the legacy behaviour preserves 2
+ dimensions via rows and columns).
- * copy - Whether to make a copy of the data.
- Defaults to True. Must be True for masked data
- and some data types (see notes below).
+ |
- .. note::
+ #. Where the :class:`~iris.cube.Cube` contains masked values, these become
+ :data:`numpy.nan` in the returned :class:`~pandas.DataFrame`.
+
+ Notes
+ -----
+ :class:`dask.dataframe.DataFrame`\\ s are not supported.
- This function will copy your data by default.
- If you have a large array that cannot be copied,
- make sure it is not masked and use copy=False.
+ A :class:`~pandas.MultiIndex` :class:`~pandas.DataFrame` is returned by default.
+ Use the :meth:`~pandas.DataFrame.reset_index` to return a
+ :class:`~pandas.DataFrame` without :class:`~pandas.MultiIndex` levels. Use
+ ``inplace=True`` to preserve the memory object reference.
- .. note::
+ :class:`~iris.cube.Cube` data `dtype` is preserved.
- Pandas will sometimes make a copy of the array,
- for example when creating from an int32 array.
- Iris will detect this and raise an exception if copy=False.
+ Examples
+ --------
+ >>> import iris
+ >>> from iris.pandas import as_data_frame
+ >>> import pandas as pd
+ >>> pd.set_option('display.width', 1000)
+ >>> pd.set_option('display.max_columns', 1000)
+
+ Convert a simple :class:`~iris.cube.Cube`:
+
+ >>> path = iris.sample_data_path('ostia_monthly.nc')
+ >>> cube = iris.load_cube(path)
+ >>> df = as_data_frame(cube)
+ >>> print(df)
+ ... # doctest: +NORMALIZE_WHITESPACE
+ surface_temperature
+ time latitude longitude
+ 2006-04-16 00:00:00 -4.999992 0.000000 301.659271
+ 0.833333 301.785004
+ 1.666667 301.820984
+ 2.500000 301.865234
+ 3.333333 301.926819
+ ... ...
+ 2010-09-16 00:00:00 4.444450 355.833313 298.779938
+ 356.666656 298.913147
+ 357.500000 NaN
+ 358.333313 NaN
+ 359.166656 298.995148
+
+ [419904 rows x 1 columns]
+
+ Using ``add_aux_coords=True`` maps :class:`~iris.coords.AuxCoord` and scalar
+ coordinate information to the :class:`~pandas.DataFrame`:
+
+ >>> df = as_data_frame(cube, add_aux_coords=True)
+ >>> print(df)
+ ... # doctest: +NORMALIZE_WHITESPACE
+ surface_temperature forecast_period forecast_reference_time
+ time latitude longitude
+ 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 0 2006-04-16 12:00:00
+ 0.833333 301.785004 0 2006-04-16 12:00:00
+ 1.666667 301.820984 0 2006-04-16 12:00:00
+ 2.500000 301.865234 0 2006-04-16 12:00:00
+ 3.333333 301.926819 0 2006-04-16 12:00:00
+ ... ... ... ...
+ 2010-09-16 00:00:00 4.444450 355.833313 298.779938 0 2010-09-16 12:00:00
+ 356.666656 298.913147 0 2010-09-16 12:00:00
+ 357.500000 NaN 0 2010-09-16 12:00:00
+ 358.333313 NaN 0 2010-09-16 12:00:00
+ 359.166656 298.995148 0 2010-09-16 12:00:00
+
+ [419904 rows x 3 columns]
+
+ To add netCDF global attribute information to the :class:`~pandas.DataFrame`,
+ add a column directly to the :class:`~pandas.DataFrame`:
+
+ >>> df['STASH'] = str(cube.attributes['STASH'])
+ >>> print(df)
+ ... # doctest: +NORMALIZE_WHITESPACE
+ surface_temperature forecast_period forecast_reference_time STASH
+ time latitude longitude
+ 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 0 2006-04-16 12:00:00 m01s00i024
+ 0.833333 301.785004 0 2006-04-16 12:00:00 m01s00i024
+ 1.666667 301.820984 0 2006-04-16 12:00:00 m01s00i024
+ 2.500000 301.865234 0 2006-04-16 12:00:00 m01s00i024
+ 3.333333 301.926819 0 2006-04-16 12:00:00 m01s00i024
+ ... ... ... ... ...
+ 2010-09-16 00:00:00 4.444450 355.833313 298.779938 0 2010-09-16 12:00:00 m01s00i024
+ 356.666656 298.913147 0 2010-09-16 12:00:00 m01s00i024
+ 357.500000 NaN 0 2010-09-16 12:00:00 m01s00i024
+ 358.333313 NaN 0 2010-09-16 12:00:00 m01s00i024
+ 359.166656 298.995148 0 2010-09-16 12:00:00 m01s00i024
+
+ [419904 rows x 4 columns]
+
+ To return a :class:`~pandas.DataFrame` without a :class:`~pandas.MultiIndex`
+ use :meth:`~pandas.DataFrame.reset_index`. Optionally use the ``inplace=True`` keyword
+ to modify the DataFrame rather than creating a new one:
+
+ >>> df.reset_index(inplace=True)
+ >>> print(df)
+ ... # doctest: +NORMALIZE_WHITESPACE
+ time latitude longitude surface_temperature forecast_period forecast_reference_time STASH
+ 0 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 0 2006-04-16 12:00:00 m01s00i024
+ 1 2006-04-16 00:00:00 -4.999992 0.833333 301.785004 0 2006-04-16 12:00:00 m01s00i024
+ 2 2006-04-16 00:00:00 -4.999992 1.666667 301.820984 0 2006-04-16 12:00:00 m01s00i024
+ 3 2006-04-16 00:00:00 -4.999992 2.500000 301.865234 0 2006-04-16 12:00:00 m01s00i024
+ 4 2006-04-16 00:00:00 -4.999992 3.333333 301.926819 0 2006-04-16 12:00:00 m01s00i024
+ ... ... ... ... ... ... ...
+ 419899 2010-09-16 00:00:00 4.444450 355.833313 298.779938 0 2010-09-16 12:00:00 m01s00i024
+ 419900 2010-09-16 00:00:00 4.444450 356.666656 298.913147 0 2010-09-16 12:00:00 m01s00i024
+ 419901 2010-09-16 00:00:00 4.444450 357.500000 NaN 0 2010-09-16 12:00:00 m01s00i024
+ 419902 2010-09-16 00:00:00 4.444450 358.333313 NaN 0 2010-09-16 12:00:00 m01s00i024
+ 419903 2010-09-16 00:00:00 4.444450 359.166656 298.995148 0 2010-09-16 12:00:00 m01s00i024
+
+ [419904 rows x 7 columns]
+
+ To retrieve a :class:`~pandas.Series` from the ``df`` :class:`~pandas.DataFrame`,
+ subselect a column:
+
+ >>> df['surface_temperature']
+ 0 301.659271
+ 1 301.785004
+ 2 301.820984
+ 3 301.865234
+ 4 301.926819
+ ...
+ 419899 298.779938
+ 419900 298.913147
+ 419901 NaN
+ 419902 NaN
+ 419903 298.995148
+ Name: surface_temperature, Length: 419904, dtype: float32
"""
- data = cube.data
- if ma.isMaskedArray(data):
- if not copy:
- raise ValueError("Masked arrays must always be copied.")
- data = data.astype("f").filled(np.nan)
- elif copy:
- data = data.copy()
- index = columns = None
- if cube.coords(dimensions=[0]):
- index = _as_pandas_coord(cube.coord(dimensions=[0]))
- if cube.coords(dimensions=[1]):
- columns = _as_pandas_coord(cube.coord(dimensions=[1]))
+ def merge_metadata(meta_var_list):
+ """Add auxiliary cube metadata to the DataFrame"""
+ nonlocal data_frame
+ for meta_var_name, meta_var_index, meta_var in meta_var_list:
+ if not meta_var_index:
+ # Broadcast any meta var information without an associated
+ # dimension over the whole DataFrame
+ data_frame[meta_var_name] = meta_var.squeeze()
+ else:
+ meta_df = pandas.DataFrame(
+ meta_var.ravel(),
+ columns=[meta_var_name],
+ index=pandas.MultiIndex.from_product(
+ [coords[i] for i in meta_var_index],
+ names=[coord_names[i] for i in meta_var_index],
+ ),
+ )
+ # Merge to main data frame
+ data_frame = pandas.merge(
+ data_frame,
+ meta_df,
+ left_index=True,
+ right_index=True,
+ sort=False,
+ )
+ return data_frame
+
+ if iris.FUTURE.pandas_ndim:
+ # Checks
+ if not isinstance(cube, iris.cube.Cube):
+ raise TypeError(
+ f"Expected input to be iris.cube.Cube instance, got: {type(cube)}"
+ )
+ if copy:
+ data = cube.data.copy()
+ else:
+ data = cube.data
+ if ma.isMaskedArray(data):
+ if not copy:
+ raise ValueError("Masked arrays must always be copied.")
+ data = data.astype("f").filled(np.nan)
+
+ # Extract dim coord information: separate lists for dim names and dim values
+ coord_names, coords = _make_dim_coord_list(cube)
+ # Make base DataFrame
+ index = pandas.MultiIndex.from_product(coords, names=coord_names)
+ data_frame = pandas.DataFrame(
+ data.ravel(), columns=[cube.name()], index=index
+ )
+
+ if add_aux_coords:
+ data_frame = merge_metadata(_make_aux_coord_list(cube))
+ if add_ancillary_variables:
+ data_frame = merge_metadata(_make_ancillary_variables_list(cube))
+ if add_cell_measures:
+ data_frame = merge_metadata(_make_cell_measures_list(cube))
- data_frame = pandas.DataFrame(data, index, columns)
- if not copy:
- _assert_shared(data, data_frame)
+ if copy:
+ result = data_frame.reorder_levels(coord_names).sort_index()
+ else:
+ data_frame.reorder_levels(coord_names).sort_index(inplace=True)
+ result = data_frame
- return data_frame
+ else:
+ message = (
+ "You are using legacy 2-dimensional behaviour in"
+ "'iris.pandas.as_data_frame()'. This will be removed in a future"
+ "version of Iris. Please opt-in to the improved "
+ "n-dimensional behaviour at your earliest convenience by setting: "
+ "'iris.FUTURE.pandas_ndim = True'. More info is in the "
+ "documentation."
+ )
+ warnings.warn(message, FutureWarning)
+
+ # The legacy behaviour.
+ data = cube.data
+ if ma.isMaskedArray(data):
+ if not copy:
+ raise ValueError("Masked arrays must always be copied.")
+ data = data.astype("f").filled(np.nan)
+ elif copy:
+ data = data.copy()
+
+ index = columns = None
+ if cube.coords(dimensions=[0]):
+ index = _as_pandas_coord(cube.coord(dimensions=[0]))
+ if cube.coords(dimensions=[1]):
+ columns = _as_pandas_coord(cube.coord(dimensions=[1]))
+
+ data_frame = pandas.DataFrame(data, index, columns)
+ if not copy:
+ _assert_shared(data, data_frame)
+
+ result = data_frame
+
+ return result
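A short end-to-end sketch of the new opt-in behaviour above, assuming the `iris-sample-data` package is available (the same file used in the docstring examples):

```python
import iris
from iris.pandas import as_data_frame

# Opt in to the new n-dimensional behaviour; this also avoids the FutureWarning.
iris.FUTURE.pandas_ndim = True

cube = iris.load_cube(iris.sample_data_path("ostia_monthly.nc"))
df = as_data_frame(cube, add_aux_coords=True)

# The cube dimensions form a MultiIndex; reset_index() gives a flat frame.
print(df.head())
print(df.reset_index().columns.tolist())
```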
diff --git a/lib/iris/plot.py b/lib/iris/plot.py
index 2da91e8c67..8cd849b716 100644
--- a/lib/iris/plot.py
+++ b/lib/iris/plot.py
@@ -13,11 +13,13 @@
import collections
import datetime
+import warnings
import cartopy.crs as ccrs
from cartopy.geodesic import Geodesic
import cartopy.mpl.geoaxes
import cftime
+import matplotlib.animation as animation
import matplotlib.axes
import matplotlib.collections as mpl_collections
import matplotlib.dates as mpl_dates
@@ -1344,11 +1346,6 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None):
axes=axes,
)
- # set the _is_stroked property to get a single color grid.
- # See https://github.com/matplotlib/matplotlib/issues/1302
- result._is_stroked = False
- if hasattr(result, "_wrapped_collection_fix"):
- result._wrapped_collection_fix._is_stroked = False
return result
@@ -1808,3 +1805,114 @@ def citation(text, figure=None, axes=None):
anchor.patch.set_boxstyle("round, pad=0, rounding_size=0.2")
axes = axes if axes else figure.gca()
axes.add_artist(anchor)
+
+
+def animate(cube_iterator, plot_func, fig=None, **kwargs):
+ """
+ Animates the given cube iterator.
+
+ Parameters
+ ----------
+ cube_iterator : iterable of :class:`iris.cube.Cube` objects
+ Each animation frame corresponds to each :class:`iris.cube.Cube`
+ object. See :meth:`iris.cube.Cube.slices`.
+ plot_func : :mod:`iris.plot` or :mod:`iris.quickplot` plotting function
+ Plotting function used to animate. Must accept the signature
+ ``plot_func(cube, vmin=vmin, vmax=vmax, coords=coords)``.
+ :func:`~iris.plot.contourf`, :func:`~iris.plot.contour`,
+ :func:`~iris.plot.pcolor` and :func:`~iris.plot.pcolormesh`
+ all conform to this signature.
+ fig : :class:`matplotlib.figure.Figure` instance, optional
+ By default, the current figure will be used or a new figure instance
+ created if no figure is available. See :func:`matplotlib.pyplot.gcf`.
+ **kwargs : dict, optional
+ Valid keyword arguments:
+
+ coords: list of :class:`~iris.coords.Coord` objects or coordinate names
+ Use the given coordinates as the axes for the plot. The order of the
+ given coordinates indicates which axis to use for each, where the first
+ element is the horizontal axis of the plot and the second element is
+ the vertical axis of the plot.
+ interval: int, float or long
+ Defines the time interval in milliseconds between successive frames.
+ A default interval of 100ms is set.
+ vmin, vmax: int, float or long
+ Color scaling values, see :class:`matplotlib.colors.Normalize` for
+ further details. Default values are determined by the min-max across
+ the data set over the entire sequence.
+
+ See :class:`matplotlib.animation.FuncAnimation` for details of other
+ valid keyword arguments.
+
+ Returns
+ -------
+ :class:`~matplotlib.animation.FuncAnimation` object suitable for
+ saving and/or plotting.
+
+ Examples
+ --------
+ >>> import iris
+ >>> from iris import plot as iplt
+ >>> from iris import quickplot as qplt
+ >>> my_cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc"))
+
+ To animate along a set of :class:`~iris.cube.Cube` slices :
+
+ >>> cube_iter = my_cube.slices(("longitude", "latitude"))
+ >>> ani = iplt.animate(cube_iter, qplt.contourf)
+ >>> iplt.show()
+
+ """
+ kwargs.setdefault("interval", 100)
+ coords = kwargs.pop("coords", None)
+
+ if fig is None:
+ fig = plt.gcf()
+
+ def update_animation_iris(i, cubes, vmin, vmax, coords):
+ # Clearing the figure is currently necessary for compatibility with
+ # the iris quickplotting module - due to the colorbar.
+ plt.gcf().clf()
+ plot_func(cubes[i], vmin=vmin, vmax=vmax, coords=coords)
+
+ # Turn cube iterator into a list to determine plot ranges.
+ # NOTE: we check that we are not providing a cube as this has a deprecated
+ # iter special method.
+ if hasattr(cube_iterator, "__iter__") and not isinstance(
+ cube_iterator, iris.cube.Cube
+ ):
+ cubes = iris.cube.CubeList(cube_iterator)
+ else:
+ msg = "iterable type object required for animation, {} given".format(
+ type(cube_iterator)
+ )
+ raise TypeError(msg)
+
+ supported = ["iris.plot", "iris.quickplot"]
+ if plot_func.__module__ not in supported:
+ msg = (
+ 'Given plotting module "{}" may not be supported, intended '
+ "use: {}."
+ )
+ msg = msg.format(plot_func.__module__, supported)
+ warnings.warn(msg, UserWarning)
+
+ supported = ["contour", "contourf", "pcolor", "pcolormesh"]
+ if plot_func.__name__ not in supported:
+ msg = (
+ 'Given plotting function "{}" may not be supported, intended '
+ "use: {}."
+ )
+ msg = msg.format(plot_func.__name__, supported)
+ warnings.warn(msg, UserWarning)
+
+ # Determine plot range.
+ vmin = kwargs.pop("vmin", min([cc.data.min() for cc in cubes]))
+ vmax = kwargs.pop("vmax", max([cc.data.max() for cc in cubes]))
+
+ update = update_animation_iris
+ frames = range(len(cubes))
+
+ return animation.FuncAnimation(
+ fig, update, frames=frames, fargs=(cubes, vmin, vmax, coords), **kwargs
+ )
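The docstring above shows interactive use of `animate`; as a follow-on, the returned `FuncAnimation` can be written out with any standard Matplotlib writer. A sketch assuming `iris-sample-data` and the Pillow writer are available:

```python
import iris
import iris.plot as iplt
import iris.quickplot as qplt

cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc"))
frames = cube.slices(("longitude", "latitude"))

ani = iplt.animate(frames, qplt.contourf, interval=200)

# Save with the Pillow writer (an animated GIF); the writer choice depends on
# the environment, e.g. ffmpeg for MP4 output.
ani.save("a1b_animation.gif", writer="pillow")
```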
diff --git a/lib/iris/plugins/README.md b/lib/iris/plugins/README.md
new file mode 100644
index 0000000000..e8dee1de2c
--- /dev/null
+++ b/lib/iris/plugins/README.md
@@ -0,0 +1,10 @@
+# Iris plugins
+
+`iris.plugins` is a [namespace package] allowing arbitrary plugins to be
+installed alongside Iris.
+
+See [the Iris documentation][plugins] for more information.
+
+
+[namespace package]: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/
+[plugins]: https://scitools-iris.readthedocs.io/en/latest/community/plugins.html
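Because `iris.plugins` is a namespace package, any distribution that installs modules under that import path becomes discoverable at runtime. A minimal sketch of listing whatever plugins happen to be installed (likely an empty list on a plain Iris install):

```python
import pkgutil

import iris.plugins

# Namespace packages expose their search locations via __path__, so standard
# module discovery works for any installed iris.plugins.* plugin.
installed = [module.name for module in pkgutil.iter_modules(iris.plugins.__path__)]
print(installed)
```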
diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py
index 18ed2554a3..6006314265 100644
--- a/lib/iris/quickplot.py
+++ b/lib/iris/quickplot.py
@@ -45,7 +45,6 @@ def _title(cube_or_coord, with_units):
or units.is_no_unit()
or units == cf_units.Unit("1")
):
-
if _use_symbol(units):
units = units.symbol
elif units.is_time_reference():
diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py
index 4840de8cdb..5529b899c5 100644
--- a/lib/iris/tests/__init__.py
+++ b/lib/iris/tests/__init__.py
@@ -16,7 +16,6 @@
import collections
from collections.abc import Mapping
import contextlib
-import datetime
import difflib
import filecmp
import functools
@@ -208,7 +207,7 @@ def assert_masked_array_almost_equal(a, b, decimal=6, strict=False):
)
-class IrisTest_nometa(unittest.TestCase):
+class IrisTest(unittest.TestCase):
"""A subclass of unittest.TestCase which provides Iris specific testing functionality."""
_assertion_counts = collections.defaultdict(int)
@@ -937,80 +936,6 @@ def assertEqualAndKind(self, value, expected):
)
-# An environment variable controls whether test timings are output.
-#
-# NOTE: to run tests with timing output, nosetests cannot be used.
-# At present, that includes not using "python setup.py test"
-# The typically best way is like this :
-# $ export IRIS_TEST_TIMINGS=1
-# $ python -m unittest discover -s iris.tests
-# and commonly adding ...
-# | grep "TIMING TEST" >iris_test_output.txt
-#
-_PRINT_TEST_TIMINGS = bool(int(os.environ.get("IRIS_TEST_TIMINGS", 0)))
-
-
-def _method_path(meth, cls):
- return ".".join([cls.__module__, cls.__name__, meth.__name__])
-
-
-def _testfunction_timing_decorator(fn, cls):
- # Function decorator for making a testcase print its execution time.
- @functools.wraps(fn)
- def inner(*args, **kwargs):
- start_time = datetime.datetime.now()
- try:
- result = fn(*args, **kwargs)
- finally:
- end_time = datetime.datetime.now()
- elapsed_time = (end_time - start_time).total_seconds()
- msg = '\n TEST TIMING -- "{}" took : {:12.6f} sec.'
- name = _method_path(fn, cls)
- print(msg.format(name, elapsed_time))
- return result
-
- return inner
-
-
-def iristest_timing_decorator(cls):
- # Class decorator to make all "test_.." functions print execution timings.
- if _PRINT_TEST_TIMINGS:
- # NOTE: 'dir' scans *all* class properties, including inherited ones.
- attr_names = dir(cls)
- for attr_name in attr_names:
- attr = getattr(cls, attr_name)
- if callable(attr) and attr_name.startswith("test"):
- attr = _testfunction_timing_decorator(attr, cls)
- setattr(cls, attr_name, attr)
- return cls
-
-
-class _TestTimingsMetaclass(type):
- # An alternative metaclass for IrisTest subclasses, which makes
- # them print execution timings for all the testcases.
- # This is equivalent to applying the @iristest_timing_decorator to
- # every test class that inherits from IrisTest.
- # NOTE: however, it means you *cannot* specify a different metaclass for
- # your test class inheriting from IrisTest.
- # See below for how to solve that where needed.
- def __new__(cls, clsname, base_classes, attrs):
- result = type.__new__(cls, clsname, base_classes, attrs)
- if _PRINT_TEST_TIMINGS:
- result = iristest_timing_decorator(result)
- return result
-
-
-class IrisTest(IrisTest_nometa, metaclass=_TestTimingsMetaclass):
- # Derive the 'ordinary' IrisTest from IrisTest_nometa, but add the
- # metaclass that enables test timings output.
- # This means that all subclasses also get the timing behaviour.
- # However, if a different metaclass is *wanted* for an IrisTest subclass,
- # this would cause a metaclass conflict.
- # Instead, you can inherit from IrisTest_nometa and apply the
- # @iristest_timing_decorator explicitly to your new testclass.
- pass
-
-
get_data_path = IrisTest.get_data_path
get_result_path = IrisTest.get_result_path
@@ -1019,11 +944,6 @@ class GraphicsTest(graphics.GraphicsTestMixin, IrisTest):
pass
-class GraphicsTest_nometa(graphics.GraphicsTestMixin, IrisTest_nometa):
- # Graphicstest without the metaclass providing test timings.
- pass
-
-
def skip_data(fn):
"""
Decorator to choose whether to run tests, based on the availability of
diff --git a/lib/iris/tests/graphics/README.md b/lib/iris/tests/graphics/README.md
index b26f1720e8..069fc01f70 100755
--- a/lib/iris/tests/graphics/README.md
+++ b/lib/iris/tests/graphics/README.md
@@ -24,7 +24,7 @@ perceived as it may be a simple pixel shift.
## Testing Strategy
-The `iris.tests.IrisTest_nometa.check_graphic` test routine calls out to
+The `iris.tests.IrisTest.check_graphic` test routine calls out to
`iris.tests.graphics.check_graphic` which tests against the **acceptable**
result. It does this using an image **hash** comparison technique which allows
us to be robust against minor variations based on underlying library updates.
diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py
index a083de3934..544d989564 100755
--- a/lib/iris/tests/graphics/__init__.py
+++ b/lib/iris/tests/graphics/__init__.py
@@ -187,7 +187,6 @@ def check_graphic(test_id: str, results_dir: Union[str, Path]) -> None:
try:
def _create_missing(phash: str) -> None:
-
output_path = test_output_dir / (test_id + ".png")
print(f"Creating image file: {output_path}")
@@ -214,7 +213,6 @@ def _create_missing(phash: str) -> None:
phash = get_phash(buffer)
if test_id in repo:
-
expected = hex_to_hash(repo[test_id])
# Calculate hamming distance vector for the result hash.
diff --git a/lib/iris/tests/experimental/test_animate.py b/lib/iris/tests/integration/plot/test_animate.py
similarity index 94%
rename from lib/iris/tests/experimental/test_animate.py
rename to lib/iris/tests/integration/plot/test_animate.py
index d8010767b8..ef19dbb108 100644
--- a/lib/iris/tests/experimental/test_animate.py
+++ b/lib/iris/tests/integration/plot/test_animate.py
@@ -4,7 +4,7 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
-Test the animation of cubes within iris.
+Integration tests for :func:`iris.plot.animate`.
"""
@@ -19,7 +19,6 @@
# Run tests in no graphics mode if matplotlib is not available.
if tests.MPL_AVAILABLE:
- import iris.experimental.animate as animate
import iris.plot as iplt
@@ -57,7 +56,7 @@ def test_cube_animation(self):
# the animation.
cube_iter = self.cube.slices(("latitude", "longitude"))
- ani = animate.animate(cube_iter, iplt.contourf)
+ ani = iplt.animate(cube_iter, iplt.contourf)
# Disconnect the first draw callback to stop the animation.
ani._fig.canvas.mpl_disconnect(ani._first_draw_id)
diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py
index b8fbc5e31a..1b95899803 100644
--- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py
+++ b/lib/iris/tests/integration/plot/test_plot_2d_coords.py
@@ -38,10 +38,17 @@ def simple_cube_w_2d_coords():
class Test(tests.GraphicsTest):
def test_2d_coord_bounds_platecarree(self):
# To avoid a problem with Cartopy smearing the data where the
- # longitude wraps, we set the central_longitude
+ # longitude wraps, we set the central_longitude.
+ # SciTools/cartopy#1421
cube = simple_cube_w_2d_coords()[0, 0]
ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=180))
qplt.pcolormesh(cube)
+
+ # Cartopy can't reliably set y-limits with curvilinear plotting.
+ # SciTools/cartopy#2121
+ y_lims = [m(cube.coord("latitude").points) for m in (np.min, np.max)]
+ ax.set_ylim(*y_lims)
+
ax.coastlines(resolution="110m", color="red")
self.check_graphic()
diff --git a/lib/iris/tests/integration/test_Datums.py b/lib/iris/tests/integration/test_Datums.py
index 77b7f28249..6953534f2d 100755
--- a/lib/iris/tests/integration/test_Datums.py
+++ b/lib/iris/tests/integration/test_Datums.py
@@ -23,7 +23,6 @@ def setUp(self):
self.start_crs = ccrs.OSGB(False)
def test_transform_points_datum(self):
-
# Iris version
wgs84 = GeogCS.from_datum("WGS84")
iris_cs = LambertConformal(
diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py
index 3feb637bf8..851c539ade 100644
--- a/lib/iris/tests/integration/test_netcdf.py
+++ b/lib/iris/tests/integration/test_netcdf.py
@@ -21,11 +21,13 @@
import netCDF4 as nc
import numpy as np
import numpy.ma as ma
+import pytest
import iris
import iris.coord_systems
from iris.coords import CellMethod, DimCoord
from iris.cube import Cube, CubeList
+import iris.exceptions
from iris.fileformats.netcdf import (
CF_CONVENTIONS_VERSION,
Saver,
@@ -903,5 +905,54 @@ def test_netcdf_with_no_constraint(self):
self.assertEqual(len(cubes), 3)
+class TestSkippedCoord:
+ # If a coord/cell measure/etcetera cannot be added to the loaded Cube, a
+ # Warning is raised and the coord is skipped.
+ # This 'catching' is generic to all CannotAddErrors, but currently the only
+ # such problem that can exist in a NetCDF file is a mismatch of dimensions
+ # between phenomenon and coord.
+
+ cdl_core = """
+dimensions:
+ length_scale = 1 ;
+ lat = 3 ;
+variables:
+ float lat(lat) ;
+ lat:standard_name = "latitude" ;
+ lat:units = "degrees_north" ;
+ short lst_unc_sys(length_scale) ;
+ lst_unc_sys:long_name = "uncertainty from large-scale systematic
+ errors" ;
+ lst_unc_sys:units = "kelvin" ;
+ lst_unc_sys:coordinates = "lat" ;
+
+data:
+ lat = 0, 1, 2;
+ """
+
+ @pytest.fixture(autouse=True)
+ def create_nc_file(self, tmp_path):
+ file_name = "dim_mismatch"
+ cdl = f"netcdf {file_name}" + "{\n" + self.cdl_core + "\n}"
+ self.nc_path = (tmp_path / file_name).with_suffix(".nc")
+ ncgen_from_cdl(
+ cdl_str=cdl,
+ cdl_path=None,
+ nc_path=str(self.nc_path),
+ )
+ yield
+ self.nc_path.unlink()
+
+ def test_lat_not_loaded(self):
+ # iris#5068 includes discussion of possible retention of the skipped
+ # coords in the future.
+ with pytest.warns(
+ match="Missing data dimensions for multi-valued DimCoord"
+ ):
+ cube = iris.load_cube(self.nc_path)
+ with pytest.raises(iris.exceptions.CoordinateNotFoundError):
+ _ = cube.coord("lat")
+
+
if __name__ == "__main__":
tests.main()
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml
index b41c0e48c7..cc9deb4260 100644
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml
+++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml
@@ -5,6 +5,60 @@
+
+
+
+
+
+
+
@@ -65,6 +119,12 @@
[0.993097, 0.989272],
[0.989272, 0.984692]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482, 0.991375, 0.987171]" shape="(5,)" units="Unit('1')" value_type="float32"/>
+
+
+
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml
index 8617be9372..fb3d2cdbcf 100644
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml
+++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml
@@ -5,6 +5,65 @@
+
+
+
+
+
+
+
@@ -59,6 +118,11 @@
[0.993097, 0.989272],
[0.989272, 0.984692]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482, 0.991375, 0.987171]" shape="(5,)" units="Unit('1')" value_type="float32"/>
+
+
+
diff --git a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml
index b863adcf55..7422bfe044 100644
--- a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml
@@ -20,8 +20,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml
index b46908a648..f9e0511ccb 100644
--- a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml
@@ -20,8 +20,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_full_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_full_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml
index c260587921..9a819eee9e 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml
@@ -31,8 +31,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml
index e545e05fdc..9133d98e73 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml
@@ -20,8 +20,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml
index 4eedfc21b3..05aeab9ccb 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml
@@ -20,8 +20,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml
index 55155047bb..9dc3e08ee6 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml
@@ -20,8 +20,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml
index fc52fce0b3..7bb47c5296 100644
--- a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml
+++ b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml
@@ -20,8 +20,8 @@
...,
[-42.7342, -40.8934, -46.161, -48.912],
[-40.8934, -38.4268, -42.6612, -46.161],
- [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
- -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791,
+ -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[-41.3152, -33.8068, -26.296, ..., -119.377,
+ -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml
index 368b3508e3..8ccd602c11 100644
--- a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml
+++ b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml
@@ -20,8 +20,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -71,8 +71,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -122,8 +122,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -173,8 +173,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -224,8 +224,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -275,8 +275,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -326,8 +326,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -377,8 +377,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -428,8 +428,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -479,8 +479,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -530,8 +530,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -581,8 +581,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -632,8 +632,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -683,8 +683,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -734,8 +734,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -785,8 +785,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -836,8 +836,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
@@ -887,8 +887,8 @@
...,
[-37.7044, -36.9373, -37.9318, -38.7655],
[-36.9373, -36.1244, -37.0517, -37.9318],
- [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
- -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/>
+ [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421,
+ -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/>
+ [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759,
+ 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/>
diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json
index e5c2ad863a..92f0d8fc20 100644
--- a/lib/iris/tests/results/imagerepo.json
+++ b/lib/iris/tests/results/imagerepo.json
@@ -35,9 +35,9 @@
"gallery_tests.test_plot_wind_speed.0": "e9e960e996169306c1fe9e96c29e36739e03c06c3d61c07f3da139e1c07f3f01",
"gallery_tests.test_plot_wind_speed.1": "e9e960e996169306c1ee9f96c29e36739653c06c3d61c07f39a139e1c07f3f01",
"gallery_tests.test_plot_zonal_means.0": "b45b3071c9a4c9a6c69c363cc327cbb3cb9634d8c9e63cf336738c6634d8c384",
- "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.0": "fe81c17e817e3e81817e3e81857e7a817e81c17e7e81c17e7a81817e817e8c2e",
- "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.1": "fe81857e817e7a85817e7a81857e7e817e81917a7e81817e7a81817e817e843e",
- "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.2": "be81817ec17e7a81c17e7e81857e3e803e81817a3e81c17e7a81c17ec97e2c2f",
+ "iris.tests.integration.plot.test_animate.IntegrationTest.test_cube_animation.0": "fe81c17e817e3e81817e3e81857e7a817e81c17e7e81c17e7a81817e817e8c2e",
+ "iris.tests.integration.plot.test_animate.IntegrationTest.test_cube_animation.1": "fe81857e817e7a85817e7a81857e7e817e81917a7e81817e7a81817e817e843e",
+ "iris.tests.integration.plot.test_animate.IntegrationTest.test_cube_animation.2": "be81817ec17e7a81c17e7e81857e3e803e81817a3e81c17e7a81c17ec97e2c2f",
"iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_northpolarstereo.0": "e59661969e699659c0f719a6c967339a1992c07f3649c09c3f612669c07b3f66",
"iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_platecarree.0": "ee856299954a1da699b6915ec25b6e419729c42c3f84bd9fa6d262d1d1dac076",
"iris.tests.integration.plot.test_plot_2d_coords.Test2dContour.test_2d_coords_contour.0": "b4b2643ecb05cb43b0f23d80c53c4e1d3e5990eb1f81c19f2f983cb1c4ff3e42",
diff --git a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml
new file mode 100644
index 0000000000..9bc2c0d1ac
--- /dev/null
+++ b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml
@@ -0,0 +1,47 @@
+ <!-- CML content of NAMEII_field__no_time_averaging.cml (47 lines of XML) elided -->
diff --git a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml
new file mode 100644
index 0000000000..8d1ad620d0
--- /dev/null
+++ b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml
@@ -0,0 +1,47 @@
+ <!-- CML content of NAMEII_field__no_time_averaging_0.cml (47 lines of XML) elided -->
diff --git a/lib/iris/tests/results/trajectory/hybrid_height.cml b/lib/iris/tests/results/trajectory/hybrid_height.cml
index 972fa7b330..28e821b900 100644
--- a/lib/iris/tests/results/trajectory/hybrid_height.cml
+++ b/lib/iris/tests/results/trajectory/hybrid_height.cml
@@ -60,7 +60,7 @@
-
+
+
diff --git a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml
index 750d597493..7b5bbfc086 100644
--- a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml
+++ b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml
@@ -1,6 +1,6 @@
-
+
@@ -144,6 +144,6 @@
-
+
diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml
index 9fc80a0e4d..e318abad67 100644
--- a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml
+++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml
@@ -50,12 +50,7 @@
-
-
-
-
-
-
+
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
-
-
-
-
-
+ [1196, 1197, 1198, 1199]]" id="e0db29d6" points="[3100, 3101, 3102, ..., 3197, 3198, 3199]" shape="(100,)" standard_name="longitude" units="Unit('unknown')" value_type="int64"/>
self.f)
self.assertTrue(self.e <= self.f)
self.assertTrue(self.f >= self.e)
diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py
index 034fb1dbda..bf3cddb8b7 100644
--- a/lib/iris/tests/test_cf.py
+++ b/lib/iris/tests/test_cf.py
@@ -276,9 +276,7 @@ def test_destructor(self):
didn't exist because opening the dataset had failed.
"""
with self.temp_filename(suffix=".nc") as fn:
-
with open(fn, "wb+") as fh:
-
fh.write(
b"\x89HDF\r\n\x1a\nBroken file with correct signature"
)
diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py
index 1972cdeb90..e568105f91 100644
--- a/lib/iris/tests/test_constraints.py
+++ b/lib/iris/tests/test_constraints.py
@@ -67,6 +67,27 @@ def test_constraints(self):
sub_list = self.slices.extract(constraint)
self.assertEqual(len(sub_list), 70 * 6)
+ def test_coord_availability(self):
+ # "model_level_number" coordinate available
+ constraint = iris.Constraint(model_level_number=lambda x: True)
+ result = self.slices.extract(constraint)
+ self.assertTrue(result)
+
+ # "wibble" coordinate is not available
+ constraint = iris.Constraint(wibble=lambda x: False)
+ result = self.slices.extract(constraint)
+ self.assertFalse(result)
+
+ # "wibble" coordinate is not available
+ constraint = iris.Constraint(wibble=lambda x: True)
+ result = self.slices.extract(constraint)
+ self.assertFalse(result)
+
+ # "lambda x: False" always (confusingly) throws away the cube
+ constraint = iris.Constraint(model_level_number=lambda x: False)
+ result = self.slices.extract(constraint)
+ self.assertFalse(result)
+
def test_mismatched_type(self):
constraint = iris.Constraint(model_level_number="aardvark")
sub_list = self.slices.extract(constraint)
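The new test_coord_availability cases above pin down a subtlety of iris.Constraint: a constraint naming a coordinate the cube does not have always rejects that cube, whatever its callable returns, and a callable that accepts no cells rejects it too. A minimal illustrative sketch (not part of the patch), using a throwaway cube rather than the suite's fixture cubes:

import numpy as np

import iris
from iris.coords import DimCoord
from iris.cube import Cube, CubeList

# Throwaway cube with a single "model_level_number" dimension coordinate.
cube = Cube(np.arange(3.0), long_name="speed")
cube.add_dim_coord(DimCoord([1, 2, 3], standard_name="model_level_number"), 0)
cubes = CubeList([cube])

kept = cubes.extract(iris.Constraint(model_level_number=lambda cell: True))
absent = cubes.extract(iris.Constraint(wibble=lambda cell: True))
no_match = cubes.extract(iris.Constraint(model_level_number=lambda cell: False))

assert len(kept) == 1      # coordinate present, every cell accepted
assert len(absent) == 0    # named coordinate missing: cube rejected outright
assert len(no_match) == 0  # callable accepts no cells: cube rejected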
diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py
index d33b76ddeb..82da82cfa9 100644
--- a/lib/iris/tests/test_io_init.py
+++ b/lib/iris/tests/test_io_init.py
@@ -126,7 +126,7 @@ def test_format_picker(self):
]
# test that each filespec is identified as the expected format
- for (expected_format_name, file_spec) in test_specs:
+ for expected_format_name, file_spec in test_specs:
test_path = tests.get_data_path(file_spec)
with open(test_path, "rb") as test_file:
a = iff.FORMAT_AGENT.get_spec(test_path, test_file)
diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py
index a71385b5bc..202c319b61 100644
--- a/lib/iris/tests/test_mapping.py
+++ b/lib/iris/tests/test_mapping.py
@@ -242,12 +242,6 @@ def test_pcolormesh(self):
iplt.pcolormesh(self.cube)
self.check_graphic()
- def test_grid(self):
- iplt.pcolormesh(self.cube, facecolors="none", edgecolors="blue")
- # the result is a graphic which has coloured edges. This is a mpl bug,
- # see https://github.com/matplotlib/matplotlib/issues/1302
- self.check_graphic()
-
def test_outline(self):
iplt.outline(self.cube)
self.check_graphic()
diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py
index c209d68da0..e53bbfb5f3 100644
--- a/lib/iris/tests/test_merge.py
+++ b/lib/iris/tests/test_merge.py
@@ -190,7 +190,7 @@ def setUp(self):
)
def test__ndarray_ndarray(self):
- for (lazy0, lazy1) in self.lazy_combos:
+ for lazy0, lazy1 in self.lazy_combos:
cubes = iris.cube.CubeList()
cubes.append(self._make_cube(0, dtype=self.dtype, lazy=lazy0))
cubes.append(self._make_cube(1, dtype=self.dtype, lazy=lazy1))
diff --git a/lib/iris/tests/test_name.py b/lib/iris/tests/test_name.py
index 2843673da8..b4e91bafd7 100644
--- a/lib/iris/tests/test_name.py
+++ b/lib/iris/tests/test_name.py
@@ -8,6 +8,9 @@
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests # isort:skip
+
+import tempfile
+
import iris
@@ -39,7 +42,7 @@ def test_NAMEIII_version2(self):
)
self.assertCMLApproxData(cubes, ("name", "NAMEIII_version2.cml"))
- def test_NAMEII_trajectory(self):
+ def test_NAMEIII_trajectory(self):
cubes = iris.load(
tests.get_data_path(("NAME", "NAMEIII_trajectory.txt"))
)
@@ -48,6 +51,32 @@ def test_NAMEII_trajectory(self):
cubes, ("name", "NAMEIII_trajectory.cml"), checksum=False
)
+ def test_NAMEII__no_time_averaging(self):
+ cubes = iris.load(
+ tests.get_data_path(("NAME", "NAMEII_no_time_averaging.txt"))
+ )
+
+ # Also check that it saves without error.
+ # This was previously failing, see https://github.com/SciTools/iris/issues/3288
+ with tempfile.TemporaryDirectory() as temp_dirpath:
+ iris.save(cubes, temp_dirpath + "/tmp.nc")
+
+ self.assertCML(
+ cubes[0],
+ (
+ "name",
+ "NAMEII_field__no_time_averaging_0.cml",
+ ),
+ )
+ self.assertCML(
+ cubes,
+ (
+ "name",
+ "NAMEII_field__no_time_averaging.cml",
+ ),
+ checksum=False,
+ )
+
if __name__ == "__main__":
tests.main()
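For reference, the round trip exercised by test_NAMEII__no_time_averaging above amounts to the sketch below; it assumes the Iris test-data archive is installed so that tests.get_data_path can resolve the NAMEII file:

import tempfile

import iris
import iris.tests as tests

# Same NAMEII field file as the new regression test.
path = tests.get_data_path(("NAME", "NAMEII_no_time_averaging.txt"))
cubes = iris.load(path)

# Saving these cubes to NetCDF used to raise (issue #3288); it should now
# complete without error.
with tempfile.TemporaryDirectory() as temp_dirpath:
    iris.save(cubes, temp_dirpath + "/tmp.nc")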
diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py
index 5017698a22..92e15a414a 100644
--- a/lib/iris/tests/test_netcdf.py
+++ b/lib/iris/tests/test_netcdf.py
@@ -313,9 +313,7 @@ def test_deferred_loading(self):
cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)],
("netcdf", "netcdf_deferred_tuple_1.cml"),
)
- subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][
- (1, 3),
- ]
+ subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][(1, 3),]
self.assertCML(subcube, ("netcdf", "netcdf_deferred_tuple_2.cml"))
# Consecutive mixture on same dimension.
@@ -1417,7 +1415,6 @@ def test_process_flags(self):
}
for bits, descriptions in multiple_map.items():
-
ll_cube = stock.lat_lon_cube()
ll_cube.attributes["ukmo__process_flags"] = descriptions
diff --git a/lib/iris/tests/test_nimrod.py b/lib/iris/tests/test_nimrod.py
index a1d7bb298f..6d62623198 100644
--- a/lib/iris/tests/test_nimrod.py
+++ b/lib/iris/tests/test_nimrod.py
@@ -80,7 +80,8 @@ def test_huge_field_load(self):
@tests.skip_data
def test_load_kwarg(self):
"""Tests that the handle_metadata_errors kwarg is effective by setting it to
- False with a file with known incomplete meta-data (missing ellipsoid)."""
+ False with a file with known incomplete meta-data (missing ellipsoid).
+ """
datafile = "u1096_ng_ek00_pressure_2km"
with self.assertRaisesRegex(
TranslationError,
diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py
index 77aea2b6b6..c9eba31e58 100644
--- a/lib/iris/tests/test_plot.py
+++ b/lib/iris/tests/test_plot.py
@@ -712,9 +712,8 @@ def override_with_decorated_methods(attr_dict, target_dict, decorator):
@tests.skip_data
-@tests.iristest_timing_decorator
class TestPcolorNoBounds(
- tests.GraphicsTest_nometa, SliceMixin, metaclass=CheckForWarningsMetaclass
+ tests.GraphicsTest, SliceMixin, metaclass=CheckForWarningsMetaclass
):
"""
Test the iris.plot.pcolor routine on a cube with coordinates
@@ -729,9 +728,8 @@ def setUp(self):
@tests.skip_data
-@tests.iristest_timing_decorator
class TestPcolormeshNoBounds(
- tests.GraphicsTest_nometa, SliceMixin, metaclass=CheckForWarningsMetaclass
+ tests.GraphicsTest, SliceMixin, metaclass=CheckForWarningsMetaclass
):
"""
Test the iris.plot.pcolormesh routine on a cube with coordinates
diff --git a/lib/iris/tests/test_pp_stash.py b/lib/iris/tests/test_pp_stash.py
index b153aef0d4..42390ab2b3 100644
--- a/lib/iris/tests/test_pp_stash.py
+++ b/lib/iris/tests/test_pp_stash.py
@@ -86,7 +86,6 @@ def test_irregular_stash_str(self):
)
def test_illegal_stash_str_range(self):
-
self.assertEqual(iris.fileformats.pp.STASH(0, 2, 3), "m??s02i003")
self.assertNotEqual(iris.fileformats.pp.STASH(0, 2, 3), "m01s02i003")
@@ -124,7 +123,7 @@ def test_illegal_stash_format(self):
("m01s02003", (1, 2, 3)),
)
- for (test_value, reference) in test_values:
+ for test_value, reference in test_values:
msg = "Expected STASH code .* {!r}".format(test_value)
with self.assertRaisesRegex(ValueError, msg):
test_value == iris.fileformats.pp.STASH(*reference)
@@ -137,7 +136,7 @@ def test_illegal_stash_type(self):
(["m01s02i003"], "m01s02i003"),
)
- for (test_value, reference) in test_values:
+ for test_value, reference in test_values:
msg = "Expected STASH code .* {!r}".format(test_value)
with self.assertRaisesRegex(TypeError, msg):
iris.fileformats.pp.STASH.from_msi(test_value) == reference
diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py
index db182ae3f3..d8d5d73e95 100644
--- a/lib/iris/tests/test_util.py
+++ b/lib/iris/tests/test_util.py
@@ -161,7 +161,6 @@ def test_default_values(self):
)
def test_trim_string_with_no_spaces(self):
-
clip_length = 200
no_space_string = "a" * 500
diff --git a/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py b/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py
new file mode 100644
index 0000000000..612e5d8ecf
--- /dev/null
+++ b/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py
@@ -0,0 +1,114 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Test function :func:`iris.analysis.cartography._get_lon_lat_coords"""
+
+import pytest
+
+from iris.analysis.cartography import _get_lon_lat_coords as g_lon_lat
+from iris.coords import AuxCoord
+from iris.tests.stock import lat_lon_cube
+
+
+@pytest.fixture
+def dim_only_cube():
+ return lat_lon_cube()
+
+
+def test_dim_only(dim_only_cube):
+ t_lat, t_lon = dim_only_cube.dim_coords
+
+ lon, lat = g_lon_lat(dim_only_cube)
+
+ assert lon == t_lon
+ assert lat == t_lat
+
+
+@pytest.fixture
+def dim_aux_cube(dim_only_cube):
+ lat_dim, lon_dim = dim_only_cube.dim_coords
+
+ lat_aux = AuxCoord.from_coord(lat_dim)
+ lat_aux.standard_name = "grid_latitude"
+ lon_aux = AuxCoord.from_coord(lon_dim)
+ lon_aux.standard_name = "grid_longitude"
+
+ dim_aux_cube = dim_only_cube
+ dim_aux_cube.add_aux_coord(lat_aux, 0)
+ dim_aux_cube.add_aux_coord(lon_aux, 1)
+
+ return dim_aux_cube
+
+
+def test_dim_aux(dim_aux_cube):
+ t_lat_dim, t_lon_dim = dim_aux_cube.dim_coords
+
+ lon, lat = g_lon_lat(dim_aux_cube)
+
+ assert lon == t_lon_dim
+ assert lat == t_lat_dim
+
+
+@pytest.fixture
+def aux_only_cube(dim_aux_cube):
+ lon_dim, lat_dim = dim_aux_cube.dim_coords
+
+ aux_only_cube = dim_aux_cube
+ aux_only_cube.remove_coord(lon_dim)
+ aux_only_cube.remove_coord(lat_dim)
+
+ return dim_aux_cube
+
+
+def test_aux_only(aux_only_cube):
+ aux_lat, aux_lon = aux_only_cube.aux_coords
+
+ lon, lat = g_lon_lat(aux_only_cube)
+
+ assert lon == aux_lon
+ assert lat == aux_lat
+
+
+@pytest.fixture
+def double_dim_cube(dim_only_cube):
+ double_dim_cube = dim_only_cube
+ double_dim_cube.coord("latitude").standard_name = "grid_longitude"
+
+ return double_dim_cube
+
+
+def test_double_dim(double_dim_cube):
+ t_error_message = "with multiple.*is currently disallowed"
+
+ with pytest.raises(ValueError, match=t_error_message):
+ g_lon_lat(double_dim_cube)
+
+
+@pytest.fixture
+def double_aux_cube(aux_only_cube):
+ double_aux_cube = aux_only_cube
+ double_aux_cube.coord("grid_latitude").standard_name = "longitude"
+
+ return double_aux_cube
+
+
+def test_double_aux(double_aux_cube):
+ t_error_message = "with multiple.*is currently disallowed"
+
+ with pytest.raises(ValueError, match=t_error_message):
+ g_lon_lat(double_aux_cube)
+
+
+@pytest.fixture
+def missing_lat_cube(dim_only_cube):
+ missing_lat_cube = dim_only_cube
+ missing_lat_cube.remove_coord("latitude")
+
+ return missing_lat_cube
+
+
+def test_missing_coord(missing_lat_cube):
+ with pytest.raises(IndexError):
+ g_lon_lat(missing_lat_cube)
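In summary, the helper exercised by this new module returns the (longitude, latitude) pair, preferring dimension coordinates over auxiliary ones, raising ValueError when several candidate coordinates exist and IndexError when one is missing. A minimal sketch of the happy path, assuming only the behaviour shown in the tests above (the helper is private, so this is illustrative rather than documented API):

from iris.analysis.cartography import _get_lon_lat_coords
from iris.tests.stock import lat_lon_cube

cube = lat_lon_cube()

# Returns the (longitude, latitude) pair; dimension coordinates win over
# auxiliary ones when both kinds are present.
lon, lat = _get_lon_lat_coords(cube)
assert lon.standard_name == "longitude"
assert lat.standard_name == "latitude"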
diff --git a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py
index 6b957baec6..810851362e 100644
--- a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py
+++ b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py
@@ -94,7 +94,6 @@ def _check_multiple_orientations_and_latitudes(
atol_degrees=0.005,
cellsize_degrees=1.0,
):
-
cube = _2d_multicells_testcube(cellsize_degrees=cellsize_degrees)
# Calculate gridcell angles at each point.
diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py
index 311da8a0e6..558a6fccfe 100644
--- a/lib/iris/tests/unit/analysis/maths/__init__.py
+++ b/lib/iris/tests/unit/analysis/maths/__init__.py
@@ -247,7 +247,7 @@ def test_partial_mask_second_lazy_not_in_place(self):
def test_in_place_introduces_mask(self):
# If second cube is masked, result should also be masked.
- data1 = np.arange(4, dtype=np.float)
+ data1 = np.arange(4, dtype=float)
data2 = ma.array([2.0, 2.0, 2.0, 2.0], mask=[1, 1, 0, 0])
cube1 = Cube(data1)
cube2 = Cube(data2)
diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py
index 51f71affb0..57e012e1c9 100644
--- a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py
+++ b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py
@@ -16,9 +16,8 @@
@tests.skip_data
-@tests.iristest_timing_decorator
class TestBroadcastingDerived(
- tests.IrisTest_nometa,
+ tests.IrisTest,
MathsAddOperationMixin,
CubeArithmeticBroadcastingTestMixin,
):
diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py
index 1d81e7b480..e1255ef9d8 100644
--- a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py
+++ b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py
@@ -54,9 +54,8 @@ def _base_testcube(self):
@tests.skip_data
-@tests.iristest_timing_decorator
class TestBroadcastingWithMesh(
- tests.IrisTest_nometa,
+ tests.IrisTest,
MeshLocationsMixin,
MathsAddOperationMixin,
CubeArithmeticBroadcastingTestMixin,
@@ -71,9 +70,8 @@ class TestBroadcastingWithMesh(
@tests.skip_data
-@tests.iristest_timing_decorator
class TestBroadcastingWithMeshAndDerived(
- tests.IrisTest_nometa,
+ tests.IrisTest,
MeshLocationsMixin,
MathsAddOperationMixin,
CubeArithmeticBroadcastingTestMixin,
diff --git a/lib/iris/tests/unit/analysis/maths/test_add.py b/lib/iris/tests/unit/analysis/maths/test_add.py
index 77dd7877bf..1ca7f7c244 100644
--- a/lib/iris/tests/unit/analysis/maths/test_add.py
+++ b/lib/iris/tests/unit/analysis/maths/test_add.py
@@ -21,10 +21,7 @@
@tests.skip_data
-@tests.iristest_timing_decorator
-class TestBroadcasting(
- tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin
-):
+class TestBroadcasting(tests.IrisTest, CubeArithmeticBroadcastingTestMixin):
@property
def data_op(self):
return operator.add
@@ -34,8 +31,7 @@ def cube_func(self):
return add
-@tests.iristest_timing_decorator
-class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin):
+class TestMasking(tests.IrisTest, CubeArithmeticMaskingTestMixin):
@property
def data_op(self):
return operator.add
@@ -57,9 +53,8 @@ def test_reversed_points(self):
add(cube1, cube2)
-@tests.iristest_timing_decorator
class TestMaskedConstant(
- tests.IrisTest_nometa, CubeArithmeticMaskedConstantTestMixin
+ tests.IrisTest, CubeArithmeticMaskedConstantTestMixin
):
@property
def data_op(self):
diff --git a/lib/iris/tests/unit/analysis/maths/test_divide.py b/lib/iris/tests/unit/analysis/maths/test_divide.py
index 1763f223b0..4bd202e037 100644
--- a/lib/iris/tests/unit/analysis/maths/test_divide.py
+++ b/lib/iris/tests/unit/analysis/maths/test_divide.py
@@ -23,10 +23,7 @@
@tests.skip_data
-@tests.iristest_timing_decorator
-class TestBroadcasting(
- tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin
-):
+class TestBroadcasting(tests.IrisTest, CubeArithmeticBroadcastingTestMixin):
@property
def data_op(self):
return operator.truediv
@@ -36,8 +33,7 @@ def cube_func(self):
return divide
-@tests.iristest_timing_decorator
-class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin):
+class TestMasking(tests.IrisTest, CubeArithmeticMaskingTestMixin):
@property
def data_op(self):
return operator.truediv
diff --git a/lib/iris/tests/unit/analysis/maths/test_multiply.py b/lib/iris/tests/unit/analysis/maths/test_multiply.py
index 600593c64b..266342605a 100644
--- a/lib/iris/tests/unit/analysis/maths/test_multiply.py
+++ b/lib/iris/tests/unit/analysis/maths/test_multiply.py
@@ -21,10 +21,7 @@
@tests.skip_data
-@tests.iristest_timing_decorator
-class TestBroadcasting(
- tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin
-):
+class TestBroadcasting(tests.IrisTest, CubeArithmeticBroadcastingTestMixin):
@property
def data_op(self):
return operator.mul
@@ -34,8 +31,7 @@ def cube_func(self):
return multiply
-@tests.iristest_timing_decorator
-class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin):
+class TestMasking(tests.IrisTest, CubeArithmeticMaskingTestMixin):
@property
def data_op(self):
return operator.mul
@@ -57,9 +53,8 @@ def test_reversed_points(self):
multiply(cube1, cube2)
-@tests.iristest_timing_decorator
class TestMaskedConstant(
- tests.IrisTest_nometa, CubeArithmeticMaskedConstantTestMixin
+ tests.IrisTest, CubeArithmeticMaskedConstantTestMixin
):
@property
def data_op(self):
diff --git a/lib/iris/tests/unit/analysis/maths/test_subtract.py b/lib/iris/tests/unit/analysis/maths/test_subtract.py
index 964e8c04c7..f7a9df34d0 100644
--- a/lib/iris/tests/unit/analysis/maths/test_subtract.py
+++ b/lib/iris/tests/unit/analysis/maths/test_subtract.py
@@ -21,10 +21,7 @@
@tests.skip_data
-@tests.iristest_timing_decorator
-class TestBroadcasting(
- tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin
-):
+class TestBroadcasting(tests.IrisTest, CubeArithmeticBroadcastingTestMixin):
@property
def data_op(self):
return operator.sub
@@ -34,8 +31,7 @@ def cube_func(self):
return subtract
-@tests.iristest_timing_decorator
-class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin):
+class TestMasking(tests.IrisTest, CubeArithmeticMaskingTestMixin):
@property
def data_op(self):
return operator.sub
@@ -57,9 +53,8 @@ def test_reversed_points(self):
subtract(cube1, cube2)
-@tests.iristest_timing_decorator
class TestMaskedConstant(
- tests.IrisTest_nometa, CubeArithmeticMaskedConstantTestMixin
+ tests.IrisTest, CubeArithmeticMaskedConstantTestMixin
):
@property
def data_op(self):
diff --git a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py
index 68db839d06..9b0160aee4 100644
--- a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py
+++ b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py
@@ -15,11 +15,12 @@
from iris.analysis._regrid import CurvilinearRegridder as Regridder
from iris.analysis.cartography import rotate_pole
+from iris.aux_factory import HybridHeightFactory
from iris.coord_systems import GeogCS, RotatedGeogCS
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube
from iris.fileformats.pp import EARTH_RADIUS
-from iris.tests.stock import global_pp, lat_lon_cube
+from iris.tests.stock import global_pp, lat_lon_cube, realistic_4d
RESULT_DIR = ("analysis", "regrid")
@@ -169,6 +170,88 @@ def test_caching(self):
)
+class Test__derived_coord(tests.IrisTest):
+ def setUp(self):
+ src = realistic_4d()[0]
+ tgt = realistic_4d()
+ new_lon, new_lat = np.meshgrid(
+ src.coord("grid_longitude").points,
+ src.coord("grid_latitude").points,
+ )
+ coord_system = src.coord("grid_latitude").coord_system
+ lat = AuxCoord(
+ new_lat, standard_name="latitude", coord_system=coord_system
+ )
+ lon = AuxCoord(
+ new_lon, standard_name="longitude", coord_system=coord_system
+ )
+ lat_t = AuxCoord(
+ new_lat.T, standard_name="latitude", coord_system=coord_system
+ )
+ lon_t = AuxCoord(
+ new_lon.T, standard_name="longitude", coord_system=coord_system
+ )
+
+ src.remove_coord("grid_latitude")
+ src.remove_coord("grid_longitude")
+ src_t = src.copy()
+ src.add_aux_coord(lat, [1, 2])
+ src.add_aux_coord(lon, [1, 2])
+ src_t.add_aux_coord(lat_t, [2, 1])
+ src_t.add_aux_coord(lon_t, [2, 1])
+ self.src = src.copy()
+ self.src_t = src_t
+ self.tgt = tgt
+ self.altitude = src.coord("altitude")
+ transposed_src = src.copy()
+ transposed_src.transpose([0, 2, 1])
+ self.altitude_transposed = transposed_src.coord("altitude")
+
+ def test_no_transpose(self):
+ rg = Regridder(self.src, self.tgt)
+ res = rg(self.src)
+
+ assert len(res.aux_factories) == 1 and isinstance(
+ res.aux_factories[0], HybridHeightFactory
+ )
+ assert np.allclose(res.coord("altitude").points, self.altitude.points)
+
+ def test_cube_transposed(self):
+ rg = Regridder(self.src, self.tgt)
+ transposed_cube = self.src.copy()
+ transposed_cube.transpose([0, 2, 1])
+ res = rg(transposed_cube)
+
+ assert len(res.aux_factories) == 1 and isinstance(
+ res.aux_factories[0], HybridHeightFactory
+ )
+ assert np.allclose(
+ res.coord("altitude").points, self.altitude_transposed.points
+ )
+
+ def test_coord_transposed(self):
+ rg = Regridder(self.src_t, self.tgt)
+ res = rg(self.src_t)
+
+ assert len(res.aux_factories) == 1 and isinstance(
+ res.aux_factories[0], HybridHeightFactory
+ )
+ assert np.allclose(
+ res.coord("altitude").points, self.altitude_transposed.points
+ )
+
+ def test_both_transposed(self):
+ rg = Regridder(self.src_t, self.tgt)
+ transposed_cube = self.src_t.copy()
+ transposed_cube.transpose([0, 2, 1])
+ res = rg(transposed_cube)
+
+ assert len(res.aux_factories) == 1 and isinstance(
+ res.aux_factories[0], HybridHeightFactory
+ )
+ assert np.allclose(res.coord("altitude").points, self.altitude.points)
+
+
@tests.skip_data
class Test___call____bad_src(tests.IrisTest):
def setUp(self):
@@ -219,7 +302,7 @@ def test_multidim(self):
grid_cube.add_dim_coord(grid_y_coord, 0)
grid_cube.add_dim_coord(grid_x_coord, 1)
- # Define some key points in true-lat/lon thta have known positions
+ # Define some key points in true-lat/lon that have known positions
# First 3x2 points in the centre of each output cell.
x_centres, y_centres = np.meshgrid(
grid_x_coord.points, grid_y_coord.points
diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py
index dad781ed74..f1b9711068 100644
--- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py
+++ b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py
@@ -12,7 +12,10 @@
# importing anything else.
import iris.tests as tests # isort:skip
+from collections import namedtuple
+
import numpy as np
+import pytest
from iris.analysis.trajectory import interpolate
from iris.coords import AuxCoord, DimCoord
@@ -38,15 +41,15 @@ def test_unknown_method(self):
interpolate(cube, sample_point, method="linekar")
-class TestNearest(tests.IrisTest):
+class TestNearest:
# Test interpolation with 'nearest' method.
# This is basically a wrapper to the routine:
# 'analysis._interpolate_private._nearest_neighbour_indices_ndcoords'.
# That has its own test, so we don't test the basic calculation
# exhaustively here. Instead we check the way it handles the source and
# result cubes (especially coordinates).
-
- def setUp(self):
+ @pytest.fixture
+ def src_cube(self):
cube = iris.tests.stock.simple_3d()
# Actually, this cube *isn't* terribly realistic, as the lat+lon coords
# have integer type, which in this case produces some peculiar results.
@@ -54,46 +57,43 @@ def setUp(self):
for coord_name in ("longitude", "latitude"):
coord = cube.coord(coord_name)
coord.points = coord.points.astype(float)
- self.test_cube = cube
+ return cube
+
+ @pytest.fixture
+ def single_point(self, src_cube):
# Define coordinates for a single-point testcase.
y_val, x_val = 0, -90
- # Work out cube indices of the testpoint.
- self.single_point_iy = np.where(
- cube.coord("latitude").points == y_val
- )[0][0]
- self.single_point_ix = np.where(
- cube.coord("longitude").points == x_val
- )[0][0]
+
# Use slightly-different values to test nearest-neighbour operation.
- self.single_sample_point = [
+ sample_point = [
("latitude", [y_val + 19.23]),
("longitude", [x_val - 17.54]),
]
- def test_single_point_same_cube(self):
- # Check exact result matching for a single point.
- cube = self.test_cube
- result = interpolate(cube, self.single_sample_point, method="nearest")
- # Check that the result is a single trajectory point, exactly equal to
- # the expected part of the original data.
- self.assertEqual(result.shape[-1], 1)
- result = result[..., 0]
- expected = cube[:, self.single_point_iy, self.single_point_ix]
- self.assertEqual(result, expected)
+ # Work out cube indices of the testpoint.
+ single_point_iy = np.where(src_cube.coord("latitude").points == y_val)[
+ 0
+ ][0]
+ single_point_ix = np.where(
+ src_cube.coord("longitude").points == x_val
+ )[0][0]
- def test_multi_point_same_cube(self):
- # Check an exact result for multiple points.
- cube = self.test_cube
+ point = namedtuple("point", "ix iy sample_point")
+ return point(single_point_ix, single_point_iy, sample_point)
+
+ @pytest.fixture
+ def multi_sample_points(self):
# Use latitude selection to recreate a whole row of the original cube.
- sample_points = [
+ return [
("longitude", [-180, -90, 0, 90]),
("latitude", [0, 0, 0, 0]),
]
- result = interpolate(cube, sample_points, method="nearest")
+ @pytest.fixture
+ def expected_multipoint_cube(self, src_cube):
# The result should be identical to a single latitude section of the
# original, but with modified coords (latitude has 4 repeated zeros).
- expected = cube[:, 1, :]
+ expected = src_cube[:, 1, :]
# Result 'longitude' is now an aux coord.
co_x = expected.coord("longitude")
expected.remove_coord(co_x)
@@ -104,36 +104,86 @@ def test_multi_point_same_cube(self):
[0, 0, 0, 0], standard_name="latitude", units="degrees"
)
expected.add_aux_coord(co_y, 1)
- self.assertEqual(result, expected)
- def test_aux_coord_noninterpolation_dim(self):
+ return expected
+
+ def test_single_point_same_cube(self, src_cube, single_point):
+ # Check exact result matching for a single point.
+ result = interpolate(
+ src_cube, single_point.sample_point, method="nearest"
+ )
+ # Check that the result is a single trajectory point, exactly equal to
+ # the expected part of the original data.
+ assert result.shape[-1] == 1
+ result = result[..., 0]
+ expected = src_cube[:, single_point.iy, single_point.ix]
+ assert result == expected
+
+ def test_multi_point_same_cube(
+ self, src_cube, multi_sample_points, expected_multipoint_cube
+ ):
+ # Check an exact result for multiple points.
+ result = interpolate(src_cube, multi_sample_points, method="nearest")
+ assert result == expected_multipoint_cube
+
+ def test_mask_preserved(
+ self, src_cube, multi_sample_points, expected_multipoint_cube
+ ):
+ mask = np.zeros_like(src_cube.data)
+ mask[:, :, 1] = 1
+ src_cube.data = np.ma.array(src_cube.data, mask=mask)
+
+ expected_multipoint_cube.data = np.ma.array(
+ expected_multipoint_cube.data, mask=mask[:, 0]
+ )
+
+ result = interpolate(src_cube, multi_sample_points, method="nearest")
+ assert result == expected_multipoint_cube
+ assert np.allclose(
+ result.data.mask, expected_multipoint_cube.data.mask
+ )
+
+ def test_dtype_preserved(
+ self, src_cube, multi_sample_points, expected_multipoint_cube
+ ):
+ src_cube.data = src_cube.data.astype(np.int16)
+
+ result = interpolate(src_cube, multi_sample_points, method="nearest")
+ assert result == expected_multipoint_cube
+ assert np.allclose(result.data, expected_multipoint_cube.data)
+ assert result.data.dtype == np.int16
+
+ def test_aux_coord_noninterpolation_dim(self, src_cube, single_point):
# Check exact result with an aux-coord mapped to an uninterpolated dim.
- cube = self.test_cube
- cube.add_aux_coord(DimCoord([17, 19], long_name="aux0"), 0)
+ src_cube.add_aux_coord(DimCoord([17, 19], long_name="aux0"), 0)
# The result cube should exactly equal a single source point.
- result = interpolate(cube, self.single_sample_point, method="nearest")
- self.assertEqual(result.shape[-1], 1)
+ result = interpolate(
+ src_cube, single_point.sample_point, method="nearest"
+ )
+ assert result.shape[-1] == 1
result = result[..., 0]
- expected = cube[:, self.single_point_iy, self.single_point_ix]
- self.assertEqual(result, expected)
+ expected = src_cube[:, single_point.iy, single_point.ix]
+ assert result == expected
- def test_aux_coord_one_interp_dim(self):
+ def test_aux_coord_one_interp_dim(self, src_cube, single_point):
# Check exact result with an aux-coord over one interpolation dims.
- cube = self.test_cube
- cube.add_aux_coord(AuxCoord([11, 12, 13, 14], long_name="aux_x"), 2)
+ src_cube.add_aux_coord(
+ AuxCoord([11, 12, 13, 14], long_name="aux_x"), 2
+ )
# The result cube should exactly equal a single source point.
- result = interpolate(cube, self.single_sample_point, method="nearest")
- self.assertEqual(result.shape[-1], 1)
+ result = interpolate(
+ src_cube, single_point.sample_point, method="nearest"
+ )
+ assert result.shape[-1] == 1
result = result[..., 0]
- expected = cube[:, self.single_point_iy, self.single_point_ix]
- self.assertEqual(result, expected)
+ expected = src_cube[:, single_point.iy, single_point.ix]
+ assert result == expected
- def test_aux_coord_both_interp_dims(self):
+ def test_aux_coord_both_interp_dims(self, src_cube, single_point):
# Check exact result with an aux-coord over both interpolation dims.
- cube = self.test_cube
- cube.add_aux_coord(
+ src_cube.add_aux_coord(
AuxCoord(
[[11, 12, 13, 14], [21, 22, 23, 24], [31, 32, 33, 34]],
long_name="aux_xy",
@@ -142,17 +192,18 @@ def test_aux_coord_both_interp_dims(self):
)
# The result cube should exactly equal a single source point.
- result = interpolate(cube, self.single_sample_point, method="nearest")
- self.assertEqual(result.shape[-1], 1)
+ result = interpolate(
+ src_cube, single_point.sample_point, method="nearest"
+ )
+ assert result.shape[-1] == 1
result = result[..., 0]
- expected = cube[:, self.single_point_iy, self.single_point_ix]
- self.assertEqual(result, expected)
+ expected = src_cube[:, single_point.iy, single_point.ix]
+ assert result == expected
- def test_aux_coord_fail_mixed_dims(self):
+ def test_aux_coord_fail_mixed_dims(self, src_cube, single_point):
# Check behaviour with an aux-coord mapped over both interpolation and
# non-interpolation dims : not supported.
- cube = self.test_cube
- cube.add_aux_coord(
+ src_cube.add_aux_coord(
AuxCoord(
[[111, 112, 113, 114], [211, 212, 213, 214]],
long_name="aux_0x",
@@ -163,22 +214,23 @@ def test_aux_coord_fail_mixed_dims(self):
"Coord aux_0x at one x-y position has the shape.*"
"instead of being a single point"
)
- with self.assertRaisesRegex(ValueError, msg):
- interpolate(cube, self.single_sample_point, method="nearest")
+ with pytest.raises(ValueError, match=msg):
+ interpolate(src_cube, single_point.sample_point, method="nearest")
- def test_metadata(self):
+ def test_metadata(self, src_cube, single_point):
# Check exact result matching for a single point, with additional
# attributes and cell-methods.
- cube = self.test_cube
- cube.attributes["ODD_ATTR"] = "string-value-example"
- cube.add_cell_method(iris.coords.CellMethod("mean", "area"))
- result = interpolate(cube, self.single_sample_point, method="nearest")
+ src_cube.attributes["ODD_ATTR"] = "string-value-example"
+ src_cube.add_cell_method(iris.coords.CellMethod("mean", "area"))
+ result = interpolate(
+ src_cube, single_point.sample_point, method="nearest"
+ )
# Check that the result is a single trajectory point, exactly equal to
# the expected part of the original data.
- self.assertEqual(result.shape[-1], 1)
+ assert result.shape[-1] == 1
result = result[..., 0]
- expected = cube[:, self.single_point_iy, self.single_point_ix]
- self.assertEqual(result, expected)
+ expected = src_cube[:, single_point.iy, single_point.ix]
+ assert result == expected
class TestLinear(tests.IrisTest):
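For context, the "nearest" trajectory interpolation exercised by the converted TestNearest cases boils down to the call below; a minimal sketch using the same simple_3d stock cube and float-cast coordinates as the src_cube fixture:

import iris.tests.stock
from iris.analysis.trajectory import interpolate

cube = iris.tests.stock.simple_3d()
# As in the fixture, cast the integer lat/lon points to float first.
for coord_name in ("longitude", "latitude"):
    coord = cube.coord(coord_name)
    coord.points = coord.points.astype(float)

# Sample points are (coord-name, values) pairs; "nearest" snaps each
# requested position onto the closest source grid point.
sample_points = [
    ("longitude", [-180, -90, 0, 90]),
    ("latitude", [0, 0, 0, 0]),
]
result = interpolate(cube, sample_points, method="nearest")
assert result.shape[-1] == 4  # the trajectory is the trailing dimension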
diff --git a/lib/iris/tests/unit/constraints/test_Constraint_equality.py b/lib/iris/tests/unit/constraints/test_Constraint_equality.py
new file mode 100644
index 0000000000..01e61b70a7
--- /dev/null
+++ b/lib/iris/tests/unit/constraints/test_Constraint_equality.py
@@ -0,0 +1,274 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for equality testing of different constraint types."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests # isort:skip
+
+from iris._constraints import AttributeConstraint, Constraint, NameConstraint
+
+
+class Test_Constraint__hash__(tests.IrisTest):
+ def test_empty(self):
+ c1 = Constraint()
+ c2 = Constraint()
+ self.assertEqual(hash(c1), hash(c1))
+ self.assertNotEqual(hash(c1), hash(c2))
+
+
+class Test_Constraint__eq__(tests.IrisTest):
+ def test_empty_same(self):
+ c1 = Constraint()
+ c2 = Constraint()
+ self.assertEqual(c1, c2)
+ self.assertIsNot(c1, c2)
+
+ def test_emptyname_same(self):
+ c1 = Constraint("")
+ c2 = Constraint("")
+ self.assertEqual(c1, c2)
+
+ def test_empty_emptyname_differ(self):
+ c1 = Constraint()
+ c2 = Constraint("")
+ self.assertNotEqual(c1, c2)
+
+ def test_names_same(self):
+ c1 = Constraint("a")
+ c2 = Constraint("a")
+ self.assertEqual(c1, c2)
+
+ def test_names_differ(self):
+ c1 = Constraint("a")
+ c2 = Constraint("b")
+ self.assertNotEqual(c1, c2)
+
+ def test_funcs_same(self):
+ # *Same* functions match
+ def func(cube):
+ return False
+
+ c1 = Constraint(cube_func=func)
+ c2 = Constraint(cube_func=func)
+ self.assertEqual(c1, c2)
+
+ def test_funcs_differ(self):
+ # Identical but different funcs do not match.
+ c1 = Constraint(cube_func=lambda c: False)
+ c2 = Constraint(cube_func=lambda c: False)
+ self.assertNotEqual(c1, c2)
+
+ def test_coord_names_same(self):
+ c1 = Constraint(some_coordname=3)
+ c2 = Constraint(some_coordname=3)
+ self.assertEqual(c1, c2)
+
+ def test_coord_names_differ(self):
+ c1 = Constraint(some_coordname_A=3)
+ c2 = Constraint(some_coordname_B=3)
+ self.assertNotEqual(c1, c2)
+
+ def test_coord_values_differ(self):
+ c1 = Constraint(some_coordname=3)
+ c2 = Constraint(some_coordname=4)
+ self.assertNotEqual(c1, c2)
+
+ def test_coord_orders_differ(self):
+ # We *could* maybe ignore Coordinate order, but at present we don't.
+ c1 = Constraint(coordname_1=1, coordname_2=2)
+ c2 = Constraint(coordname_2=2, coordname_1=1)
+ self.assertNotEqual(c1, c2)
+
+ def test_coord_values_functions_same(self):
+ def func(coord):
+ return False
+
+ c1 = Constraint(some_coordname=func)
+ c2 = Constraint(some_coordname=func)
+ self.assertEqual(c1, c2)
+
+ def test_coord_values_functions_differ(self):
+ # Identical functions are not the same.
+ c1 = Constraint(some_coordname=lambda c: True)
+ c2 = Constraint(some_coordname=lambda c: True)
+ self.assertNotEqual(c1, c2)
+
+ def test_coord_values_and_keys_same(self):
+ # **kwargs and 'coord_values=' are combined without distinction.
+ c1 = Constraint(coord_values={"a": [2, 3]})
+ c2 = Constraint(a=[2, 3])
+ self.assertEqual(c1, c2)
+
+
+class Test_AttributeConstraint__hash__(tests.IrisTest):
+ def test_empty(self):
+ c1 = AttributeConstraint()
+ c2 = AttributeConstraint()
+ self.assertEqual(hash(c1), hash(c1))
+ self.assertNotEqual(hash(c1), hash(c2))
+
+
+class Test_AttributeConstraint__eq__(tests.IrisTest):
+ def test_empty_same(self):
+ c1 = AttributeConstraint()
+ c2 = AttributeConstraint()
+ self.assertEqual(c1, c2)
+ self.assertIsNot(c1, c2)
+
+ def test_attribute_plain_empty_diff(self):
+ c1 = AttributeConstraint()
+ c2 = Constraint()
+ self.assertNotEqual(c1, c2)
+
+ def test_names_same(self):
+ c1 = AttributeConstraint(a=1)
+ c2 = AttributeConstraint(a=1)
+ self.assertEqual(c1, c2)
+
+ def test_names_diff(self):
+ c1 = AttributeConstraint(a=1)
+ c2 = AttributeConstraint(a=1, b=1)
+ self.assertNotEqual(c1, c2)
+
+ def test_values_diff(self):
+ c1 = AttributeConstraint(a=1, b=1)
+ c2 = AttributeConstraint(a=1, b=2)
+ self.assertNotEqual(c1, c2)
+
+ def test_func_same(self):
+ def func(attrs):
+ return False
+
+ c1 = AttributeConstraint(a=func)
+ c2 = AttributeConstraint(a=func)
+ self.assertEqual(c1, c2)
+
+ def test_func_diff(self):
+ c1 = AttributeConstraint(a=lambda a: False)
+ c2 = AttributeConstraint(a=lambda a: False)
+ self.assertNotEqual(c1, c2)
+
+
+class Test_NameConstraint__hash__(tests.IrisTest):
+ def test_empty(self):
+ c1 = NameConstraint()
+ c2 = NameConstraint()
+ self.assertEqual(hash(c1), hash(c1))
+ self.assertNotEqual(hash(c1), hash(c2))
+
+
+class Test_NameConstraint__eq__(tests.IrisTest):
+ def test_empty_same(self):
+ c1 = NameConstraint()
+ c2 = NameConstraint()
+ self.assertEqual(c1, c2)
+ self.assertIsNot(c1, c2)
+
+ def test_attribute_plain_empty_diff(self):
+ c1 = NameConstraint()
+ c2 = Constraint()
+ self.assertNotEqual(c1, c2)
+
+ def test_names_same(self):
+ c1 = NameConstraint(standard_name="air_temperature")
+ c2 = NameConstraint(standard_name="air_temperature")
+ self.assertEqual(c1, c2)
+
+ def test_full_same(self):
+ c1 = NameConstraint(
+ standard_name="air_temperature",
+ long_name="temp",
+ var_name="tair",
+ STASH="m01s02i003",
+ )
+ c2 = NameConstraint(
+ standard_name="air_temperature",
+ long_name="temp",
+ var_name="tair",
+ STASH="m01s02i003",
+ )
+ self.assertEqual(c1, c2)
+
+ def test_missing_diff(self):
+ c1 = NameConstraint(standard_name="air_temperature", var_name="tair")
+ c2 = NameConstraint(standard_name="air_temperature")
+ self.assertNotEqual(c1, c2)
+
+ def test_standard_name_diff(self):
+ c1 = NameConstraint(standard_name="air_temperature")
+ c2 = NameConstraint(standard_name="height")
+ self.assertNotEqual(c1, c2)
+
+ def test_long_name_diff(self):
+ c1 = NameConstraint(long_name="temp")
+ c2 = NameConstraint(long_name="t3")
+ self.assertNotEqual(c1, c2)
+
+ def test_var_name_diff(self):
+ c1 = NameConstraint(var_name="tair")
+ c2 = NameConstraint(var_name="xxx")
+ self.assertNotEqual(c1, c2)
+
+ def test_stash_diff(self):
+ c1 = NameConstraint(STASH="m01s02i003")
+ c2 = NameConstraint(STASH="m01s02i777")
+ self.assertNotEqual(c1, c2)
+
+ def test_func_same(self):
+ def func(name):
+ return True
+
+ c1 = NameConstraint(STASH="m01s02i003", long_name=func)
+ c2 = NameConstraint(STASH="m01s02i003", long_name=func)
+ self.assertEqual(c1, c2)
+
+ def test_func_diff(self):
+ c1 = NameConstraint(STASH="m01s02i003", long_name=lambda n: True)
+ c2 = NameConstraint(STASH="m01s02i003", long_name=lambda n: True)
+ self.assertNotEqual(c1, c2)
+
+
+class Test_ConstraintCombination__hash__(tests.IrisTest):
+ def test_empty(self):
+ c1 = Constraint() & Constraint()
+ c2 = Constraint() & Constraint()
+ self.assertEqual(hash(c1), hash(c1))
+ self.assertNotEqual(hash(c1), hash(c2))
+
+ def test_identical_construction(self):
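+        # Even combinations built from the same component constraints are
+        # expected to hash differently, since each combination is a new object.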
+ c1, c2 = Constraint(a=1), Constraint(b=1)
+ cc1 = c1 & c2
+ cc2 = c1 & c2
+ self.assertNotEqual(hash(cc1), hash(cc2))
+
+
+class Test_ConstraintCombination__eq__(tests.IrisTest):
+ def test_empty_same(self):
+ c1 = Constraint() & Constraint()
+ c2 = Constraint() & Constraint()
+ self.assertEqual(c1, c2)
+ self.assertIsNot(c1, c2)
+
+ def test_multi_components_same(self):
+ c1 = Constraint("a") & Constraint(b=1)
+ c2 = Constraint("a") & Constraint(b=1)
+ self.assertEqual(c1, c2)
+
+ def test_multi_components_diff(self):
+ c1 = Constraint("a") & Constraint(b=1, c=2)
+ c2 = Constraint("a") & Constraint(b=1)
+ self.assertNotEqual(c1, c2)
+
+ def test_different_component_order(self):
+ c1, c2 = Constraint("a"), Constraint(b=1)
+ cc1 = c1 & c2
+ cc2 = c2 & c1
+ self.assertNotEqual(cc1, cc2)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py
index 75b6250449..e5fc8fd28a 100644
--- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py
+++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py
@@ -68,7 +68,7 @@ def setUp(self):
self.setupTestArrays(masked=True)
def test_lazyness_and_dtype_combinations(self):
- for (ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses(
+ for ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(
self,
):
data = ancill_var.core_data()
@@ -225,10 +225,9 @@ def test_dtypes(self):
# floating dtype.
# Check that dtypes remain the same in all cases, taking the dtypes
# directly from the core data as we have no masking).
- for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses(
+ for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(
self
):
-
sub_ancill_var = main_ancill_var[:2, 1]
ancill_var_dtype = main_ancill_var.dtype
@@ -250,10 +249,9 @@ def test_lazyness(self):
# Index ancillary variables with real+lazy data, and either an int or
# floating dtype.
# Check that lazy data stays lazy and real stays real, in all cases.
- for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses(
+ for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(
self
):
-
sub_ancill_var = main_ancill_var[:2, 1]
msg = (
@@ -277,10 +275,9 @@ def test_lazyness(self):
def test_real_data_copies(self):
# Index ancillary variables with real+lazy data.
# In all cases, check that any real arrays are copied by the indexing.
- for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses(
+ for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(
self
):
-
sub_ancill_var = main_ancill_var[:2, 1]
msg = (
@@ -308,10 +305,9 @@ def test_lazyness(self):
# Copy ancillary variables with real+lazy data, and either an int or
# floating dtype.
# Check that lazy data stays lazy and real stays real, in all cases.
- for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses(
+ for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(
self
):
-
ancill_var_dtype = main_ancill_var.dtype
copied_ancill_var = main_ancill_var.copy()
@@ -338,10 +334,9 @@ def test_lazyness(self):
def test_realdata_copies(self):
# Copy ancillary variables with real+lazy data.
# In all cases, check that any real arrays are copies, not views.
- for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses(
+ for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(
self
):
-
copied_ancill_var = main_ancill_var.copy()
msg = (
@@ -520,79 +515,79 @@ def _check(self, result_ancill_var, expected_data, lazyness):
self.assertEqualLazyArraysAndDtypes(expected_data, data)
def test_add(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = ancill_var + 10
expected_data = orig_data + 10
self._check(result, expected_data, data_lazyness)
def test_add_inplace(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
ancill_var += 10
expected_data = orig_data + 10
self._check(ancill_var, expected_data, data_lazyness)
def test_right_add(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = 10 + ancill_var
expected_data = 10 + orig_data
self._check(result, expected_data, data_lazyness)
def test_subtract(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = ancill_var - 10
expected_data = orig_data - 10
self._check(result, expected_data, data_lazyness)
def test_subtract_inplace(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
ancill_var -= 10
expected_data = orig_data - 10
self._check(ancill_var, expected_data, data_lazyness)
def test_right_subtract(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = 10 - ancill_var
expected_data = 10 - orig_data
self._check(result, expected_data, data_lazyness)
def test_multiply(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = ancill_var * 10
expected_data = orig_data * 10
self._check(result, expected_data, data_lazyness)
def test_multiply_inplace(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
ancill_var *= 10
expected_data = orig_data * 10
self._check(ancill_var, expected_data, data_lazyness)
def test_right_multiply(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = 10 * ancill_var
expected_data = 10 * orig_data
self._check(result, expected_data, data_lazyness)
def test_divide(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = ancill_var / 10
expected_data = orig_data / 10
self._check(result, expected_data, data_lazyness)
def test_divide_inplace(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
ancill_var /= 10
expected_data = orig_data / 10
self._check(ancill_var, expected_data, data_lazyness)
def test_right_divide(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = 10 / ancill_var
expected_data = 10 / orig_data
self._check(result, expected_data, data_lazyness)
def test_negative(self):
- for (ancill_var, orig_data, data_lazyness) in self.test_combinations:
+ for ancill_var, orig_data, data_lazyness in self.test_combinations:
result = -ancill_var
expected_data = -orig_data
self._check(result, expected_data, data_lazyness)
diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py
index e6cd8ac821..e5147659fc 100644
--- a/lib/iris/tests/unit/coords/test_AuxCoord.py
+++ b/lib/iris/tests/unit/coords/test_AuxCoord.py
@@ -370,7 +370,6 @@ def test_dtypes(self):
points_type_name,
bounds_type_name,
) in coords_all_dtypes_and_lazynesses(self, AuxCoord):
-
sub_coord = main_coord[:2, 1]
coord_dtype = main_coord.dtype
@@ -417,7 +416,6 @@ def test_lazyness(self):
points_type_name,
bounds_type_name,
) in coords_all_dtypes_and_lazynesses(self, AuxCoord):
-
sub_coord = main_coord[:2, 1]
msg = (
@@ -463,7 +461,6 @@ def test_real_data_copies(self):
points_lazyness,
bounds_lazyness,
) in coords_all_dtypes_and_lazynesses(self, AuxCoord):
-
sub_coord = main_coord[:2, 1]
msg = (
@@ -511,7 +508,6 @@ def test_lazyness(self):
points_lazyness,
bounds_lazyness,
) in coords_all_dtypes_and_lazynesses(self, AuxCoord):
-
coord_dtype = main_coord.dtype
copied_coord = main_coord.copy()
@@ -558,7 +554,6 @@ def test_realdata_copies(self):
points_lazyness,
bounds_lazyness,
) in coords_all_dtypes_and_lazynesses(self, AuxCoord):
-
copied_coord = main_coord.copy()
msg = (
diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py
index 650f9ded6c..81370bd0de 100644
--- a/lib/iris/tests/unit/coords/test_Cell.py
+++ b/lib/iris/tests/unit/coords/test_Cell.py
@@ -30,33 +30,6 @@ def assert_raises_on_comparison(self, cell, other, exception_type, regexp):
with self.assertRaisesRegex(exception_type, regexp):
cell >= other
- def test_cftime_cell(self):
- # Check that cell comparison when the cell contains
- # cftime.datetime objects raises an exception otherwise
- # this will fall back to id comparison producing unreliable
- # results.
- cell = Cell(cftime.datetime(2010, 3, 21))
- dt = mock.Mock(timetuple=mock.Mock())
- self.assert_raises_on_comparison(
- cell, dt, TypeError, "determine the order of cftime"
- )
- self.assert_raises_on_comparison(
- cell, 23, TypeError, "determine the order of cftime"
- )
- self.assert_raises_on_comparison(
- cell, "hello", TypeError, "Unexpected type.*str"
- )
-
- def test_cftime_other(self):
- # Check that cell comparison to a cftime.datetime object
- # raises an exception otherwise this will fall back to id comparison
- # producing unreliable results.
- dt = cftime.datetime(2010, 3, 21)
- cell = Cell(mock.Mock(timetuple=mock.Mock()))
- self.assert_raises_on_comparison(
- cell, dt, TypeError, "determine the order of cftime"
- )
-
def test_PartialDateTime_bounded_cell(self):
# Check that bounded comparisions to a PartialDateTime
# raise an exception. These are not supported as they
@@ -85,10 +58,9 @@ def test_PartialDateTime_unbounded_cell(self):
def test_datetime_unbounded_cell(self):
# Check that cell comparison works with datetimes.
dt = datetime.datetime(2000, 6, 15)
- cell = Cell(datetime.datetime(2000, 1, 1))
- # Note the absence of the inverse of these
- # e.g. self.assertGreater(dt, cell).
- # See http://bugs.python.org/issue8005
+ cell = Cell(cftime.datetime(2000, 1, 1))
+ self.assertGreater(dt, cell)
+ self.assertGreaterEqual(dt, cell)
self.assertLess(cell, dt)
self.assertLessEqual(cell, dt)
diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py
index dca6ed3c1b..72a48437ec 100644
--- a/lib/iris/tests/unit/coords/test_Coord.py
+++ b/lib/iris/tests/unit/coords/test_Coord.py
@@ -463,7 +463,6 @@ def test_lazy_nd_bounds_last(self):
)
def test_lazy_nd_points_and_bounds(self):
-
self.setupTestArrays((3, 4))
coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy)
@@ -520,7 +519,6 @@ def test_lazy_nd_noncontiguous_bounds_warning(self):
coord.collapsed()
def test_numeric_3_bounds(self):
-
points = np.array([2.0, 6.0, 4.0])
bounds = np.array([[1.0, 0.0, 3.0], [5.0, 4.0, 7.0], [3.0, 2.0, 5.0]])
@@ -544,7 +542,6 @@ def test_numeric_3_bounds(self):
)
def test_lazy_3_bounds(self):
-
points = da.arange(3) * 2.0
bounds = da.arange(3 * 3).reshape(3, 3)
diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py
index 4298b140ea..dd0ba48f3d 100644
--- a/lib/iris/tests/unit/coords/test_DimCoord.py
+++ b/lib/iris/tests/unit/coords/test_DimCoord.py
@@ -304,7 +304,6 @@ def test_dtypes(self):
points_type_name,
bounds_type_name,
) in coords_all_dtypes_and_lazynesses(self, DimCoord):
-
sub_coord = main_coord[:2]
coord_dtype = main_coord.dtype
@@ -404,7 +403,6 @@ def test_real_data_copies(self):
points_lazyness,
bounds_lazyness,
) in coords_all_dtypes_and_lazynesses(self, DimCoord):
-
sub_coord = main_coord[:2]
msg = (
@@ -470,7 +468,6 @@ def test_realdata_readonly(self):
points_type_name,
bounds_type_name,
) in coords_all_dtypes_and_lazynesses(self, DimCoord):
-
copied_coord = main_coord.copy()
copied_points = copied_coord.core_points()
diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py
index f9316ff92c..83fcbc4512 100644
--- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py
+++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py
@@ -911,11 +911,11 @@ def test_meshcoord(self):
result = self.repr_str_strings(meshco)
expected = [
(
- ""
),
- "MeshCoord : longitude / (degrees_east)",
+ "MeshCoord : longitude / (unknown)",
" mesh: ",
" location: 'face'",
" points: [3100, 3101, 3102]",
@@ -926,10 +926,6 @@ def test_meshcoord(self):
" shape: (3,) bounds(3, 4)",
" dtype: int64",
" standard_name: 'longitude'",
- " long_name: 'long-name'",
- " attributes:",
- " a 1",
- " b 'c'",
" axis: 'x'",
]
self.assertLines(expected, result)
diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py
index f38d6ef35d..8e9e00dce8 100644
--- a/lib/iris/tests/unit/cube/test_Cube.py
+++ b/lib/iris/tests/unit/cube/test_Cube.py
@@ -9,6 +9,7 @@
# importing anything else.
import iris.tests as tests # isort:skip
+from collections import namedtuple
from itertools import permutations
from unittest import mock
@@ -1864,12 +1865,14 @@ class Test_copy(tests.IrisTest):
def _check_copy(self, cube, cube_copy):
self.assertIsNot(cube_copy, cube)
self.assertEqual(cube_copy, cube)
- self.assertIsNot(cube_copy.data, cube.data)
+ self.assertIsNot(cube_copy.core_data(), cube.core_data())
if ma.isMaskedArray(cube.data):
self.assertMaskedArrayEqual(cube_copy.data, cube.data)
if cube.data.mask is not ma.nomask:
# "No mask" is a constant : all other cases must be distinct.
- self.assertIsNot(cube_copy.data.mask, cube.data.mask)
+ self.assertIsNot(
+ cube_copy.core_data().mask, cube.core_data().mask
+ )
else:
self.assertArrayEqual(cube_copy.data, cube.data)
@@ -1910,6 +1913,9 @@ def test__masked_scalar_arraymask(self):
self._check_copy(cube, cube.copy())
def test__lazy(self):
+ # 2022-11-02: Dask's current behaviour is that the computed array will
+ # be the same for cube and cube.copy(), even if the Dask arrays are
+ # different.
cube = Cube(as_lazy_data(np.array([1, 0])))
self._check_copy(cube, cube.copy())
@@ -1956,6 +1962,7 @@ def _assert_lists_equal(self, items_a, items_b):
a different order.
"""
+
# Compare (and thus sort) by their *common* metadata.
def sortkey(item):
return BaseMetadata.from_metadata(item.metadata)
@@ -2527,6 +2534,25 @@ def test_fail_remove_ancilliary_variable_by_name(self):
self.cube.remove_ancillary_variable("notname")
+class TestCoords(tests.IrisTest):
+ def setUp(self):
+ cube = Cube(np.arange(6).reshape(2, 3))
+ x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x")
+ cube.add_dim_coord(x_coord, 1)
+ self.x_coord = x_coord
+ self.cube = cube
+
+ def test_bad_coord(self):
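+        # A coord whose metadata no longer matches anything on the cube
+        # (here, via an extra attribute) should not be found.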
+ bad_coord = self.x_coord.copy()
+ bad_coord.attributes = {"bad": "attribute"}
+ re = (
+ "Expected to find exactly 1 coordinate matching the given "
+ "'x' coordinate's metadata, but found none."
+ )
+ with self.assertRaisesRegex(CoordinateNotFoundError, re):
+ _ = self.cube.coord(bad_coord)
+
+
class Test__getitem_CellMeasure(tests.IrisTest):
def setUp(self):
cube = Cube(np.arange(6).reshape(2, 3))
@@ -2937,64 +2963,254 @@ def test_cell_method_correct_order(self):
self.assertTrue(cube1 == cube2)
+@pytest.fixture
+def simplecube():
+ return stock.simple_2d_w_cell_measure_ancil_var()
+
+
class Test__dimensional_metadata:
- @pytest.fixture
- def cube(self):
- return stock.simple_2d_w_cell_measure_ancil_var()
+ """
+ Tests for the "Cube._dimensional_data" method.
- def test_not_found(self, cube):
+ NOTE: test could all be static methods, but that adds a line to each definition.
+ """
+
+ def test_not_found(self, simplecube):
with pytest.raises(KeyError, match="was not found in"):
- cube._dimensional_metadata("grid_latitude")
+ simplecube._dimensional_metadata("grid_latitude")
- def test_dim_coord_name_found(self, cube):
- res = cube._dimensional_metadata("bar")
- assert res == cube.coord("bar")
+ def test_dim_coord_name_found(self, simplecube):
+ res = simplecube._dimensional_metadata("bar")
+ assert res == simplecube.coord("bar")
- def test_dim_coord_instance_found(self, cube):
- res = cube._dimensional_metadata(cube.coord("bar"))
- assert res == cube.coord("bar")
+ def test_dim_coord_instance_found(self, simplecube):
+ res = simplecube._dimensional_metadata(simplecube.coord("bar"))
+ assert res == simplecube.coord("bar")
- def test_aux_coord_name_found(self, cube):
- res = cube._dimensional_metadata("wibble")
- assert res == cube.coord("wibble")
+ def test_aux_coord_name_found(self, simplecube):
+ res = simplecube._dimensional_metadata("wibble")
+ assert res == simplecube.coord("wibble")
- def test_aux_coord_instance_found(self, cube):
- res = cube._dimensional_metadata(cube.coord("wibble"))
- assert res == cube.coord("wibble")
+ def test_aux_coord_instance_found(self, simplecube):
+ res = simplecube._dimensional_metadata(simplecube.coord("wibble"))
+ assert res == simplecube.coord("wibble")
- def test_cell_measure_name_found(self, cube):
- res = cube._dimensional_metadata("cell_area")
- assert res == cube.cell_measure("cell_area")
+ def test_cell_measure_name_found(self, simplecube):
+ res = simplecube._dimensional_metadata("cell_area")
+ assert res == simplecube.cell_measure("cell_area")
- def test_cell_measure_instance_found(self, cube):
- res = cube._dimensional_metadata(cube.cell_measure("cell_area"))
- assert res == cube.cell_measure("cell_area")
+ def test_cell_measure_instance_found(self, simplecube):
+ res = simplecube._dimensional_metadata(
+ simplecube.cell_measure("cell_area")
+ )
+ assert res == simplecube.cell_measure("cell_area")
- def test_ancillary_var_name_found(self, cube):
- res = cube._dimensional_metadata("quality_flag")
- assert res == cube.ancillary_variable("quality_flag")
+ def test_ancillary_var_name_found(self, simplecube):
+ res = simplecube._dimensional_metadata("quality_flag")
+ assert res == simplecube.ancillary_variable("quality_flag")
- def test_ancillary_var_instance_found(self, cube):
- res = cube._dimensional_metadata(
- cube.ancillary_variable("quality_flag")
+ def test_ancillary_var_instance_found(self, simplecube):
+ res = simplecube._dimensional_metadata(
+ simplecube.ancillary_variable("quality_flag")
)
- assert res == cube.ancillary_variable("quality_flag")
+ assert res == simplecube.ancillary_variable("quality_flag")
- def test_two_with_same_name(self, cube):
+ def test_two_with_same_name(self, simplecube):
# If a cube has two _DimensionalMetadata objects with the same name, the
# current behaviour results in _dimensional_metadata returning the first
# one it finds.
- cube.cell_measure("cell_area").rename("wibble")
- res = cube._dimensional_metadata("wibble")
- assert res == cube.coord("wibble")
+ simplecube.cell_measure("cell_area").rename("wibble")
+ res = simplecube._dimensional_metadata("wibble")
+ assert res == simplecube.coord("wibble")
- def test_two_with_same_name_specify_instance(self, cube):
+ def test_two_with_same_name_specify_instance(self, simplecube):
# The cube has two _DimensionalMetadata objects with the same name so
# we specify the _DimensionalMetadata instance to ensure it returns the
# correct one.
- cube.cell_measure("cell_area").rename("wibble")
- res = cube._dimensional_metadata(cube.cell_measure("wibble"))
- assert res == cube.cell_measure("wibble")
+ simplecube.cell_measure("cell_area").rename("wibble")
+ res = simplecube._dimensional_metadata(
+ simplecube.cell_measure("wibble")
+ )
+ assert res == simplecube.cell_measure("wibble")
+
+
+class TestReprs:
+ """
+ Confirm that str(cube), repr(cube) and cube.summary() work by creating a fresh
+ :class:`iris._representation.cube_printout.CubePrinter` object, and using it
+ in the expected ways.
+
+ Notes
+ -----
+ This only tests code connectivity. The functionality is tested elsewhere, in
+ `iris.tests.unit._representation.cube_printout.test_CubePrintout`.
+ """
+
+ # Note: logically this could be a staticmethod, but that seems to upset Pytest
+ @pytest.fixture
+ def patched_cubeprinter(self):
+ target = "iris._representation.cube_printout.CubePrinter"
+ instance_mock = mock.MagicMock(
+ to_string=mock.MagicMock(
+ return_value=""
+ ) # NB this must return a string
+ )
+ with mock.patch(target, return_value=instance_mock) as class_mock:
+ yield class_mock, instance_mock
+
+ @staticmethod
+ def _check_expected_effects(
+ simplecube, patched_cubeprinter, oneline, padding
+ ):
+ class_mock, instance_mock = patched_cubeprinter
+ assert class_mock.call_args_list == [
+ # "CubePrinter()" was called exactly once, with the cube as arg
+ mock.call(simplecube)
+ ]
+ assert instance_mock.to_string.call_args_list == [
+ # "CubePrinter(cube).to_string()" was called exactly once, with these args
+ mock.call(oneline=oneline, name_padding=padding)
+ ]
+
+ def test_str_effects(self, simplecube, patched_cubeprinter):
+ str(simplecube)
+ self._check_expected_effects(
+ simplecube, patched_cubeprinter, oneline=False, padding=35
+ )
+
+ def test_repr_effects(self, simplecube, patched_cubeprinter):
+ repr(simplecube)
+ self._check_expected_effects(
+ simplecube, patched_cubeprinter, oneline=True, padding=1
+ )
+
+ def test_summary_effects(self, simplecube, patched_cubeprinter):
+ simplecube.summary(
+ shorten=mock.sentinel.oneliner, name_padding=mock.sentinel.padding
+ )
+ self._check_expected_effects(
+ simplecube,
+ patched_cubeprinter,
+ oneline=mock.sentinel.oneliner,
+ padding=mock.sentinel.padding,
+ )
+
+
+class TestHtmlRepr:
+ """
+ Confirm that Cube._repr_html_() creates a fresh
+ :class:`iris.experimental.representation.CubeRepresentation` object, and uses it
+ in the expected way.
+
+ Notes
+ -----
+ This only tests code connectivity. The functionality is tested elsewhere, in
+ `iris.tests.unit.experimental.representation.test_CubeRepresentation`.
+ """
+
+ # Note: logically this could be a staticmethod, but that seems to upset Pytest
+ @pytest.fixture
+ def patched_cubehtml(self):
+ target = "iris.experimental.representation.CubeRepresentation"
+ instance_mock = mock.MagicMock(
+ repr_html=mock.MagicMock(
+ return_value=""
+ ) # NB this must return a string
+ )
+ with mock.patch(target, return_value=instance_mock) as class_mock:
+ yield class_mock, instance_mock
+
+ @staticmethod
+ def test__repr_html__effects(simplecube, patched_cubehtml):
+ simplecube._repr_html_()
+
+ class_mock, instance_mock = patched_cubehtml
+ assert class_mock.call_args_list == [
+ # "CubeRepresentation()" was called exactly once, with the cube as arg
+ mock.call(simplecube)
+ ]
+ assert instance_mock.repr_html.call_args_list == [
+ # "CubeRepresentation(cube).repr_html()" was called exactly once, with no args
+ mock.call()
+ ]
+
+
+class Test__cell_methods:
+ @pytest.fixture(autouse=True)
+    def cell_methods_testdata(self):
+ self.cube = Cube([0])
+ self.cm = CellMethod("mean", "time", "6hr")
+ self.cm2 = CellMethod("max", "latitude", "4hr")
+
+ def test_none(self):
+ assert self.cube.cell_methods == ()
+
+ def test_one(self):
+ cube = Cube([0], cell_methods=[self.cm])
+ expected = (self.cm,)
+ assert expected == cube.cell_methods
+
+ def test_empty_assigns(self):
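+        # Any "empty" (falsey) value should be accepted and read back as ().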
+ testargs = [(), [], {}, 0, 0.0, False, None]
+ results = []
+ for arg in testargs:
+ cube = self.cube.copy()
+ cube.cell_methods = arg # assign test object
+ results.append(cube.cell_methods) # capture what is read back
+ expected_results = [()] * len(testargs)
+ assert expected_results == results
+
+ def test_single_assigns(self):
+ cms = (self.cm, self.cm2)
+ # Any type of iterable ought to work
+ # But N.B. *not* testing sets, as order is not stable
+ testargs = [cms, list(cms), {cm: 1 for cm in cms}]
+ results = []
+ for arg in testargs:
+ cube = self.cube.copy()
+ cube.cell_methods = arg # assign test object
+ results.append(cube.cell_methods) # capture what is read back
+ expected_results = [cms] * len(testargs)
+ assert expected_results == results
+
+ def test_fail_assign_noniterable(self):
+ test_object = object()
+ with pytest.raises(TypeError, match="not iterable"):
+ self.cube.cell_methods = test_object
+
+ def test_fail_create_noniterable(self):
+ test_object = object()
+ with pytest.raises(TypeError, match="not iterable"):
+ Cube([0], cell_methods=test_object)
+
+ def test_fail_assign_noncellmethod(self):
+ test_object = object()
+ with pytest.raises(ValueError, match="not an iris.coords.CellMethod"):
+ self.cube.cell_methods = (test_object,)
+
+ def test_fail_create_noncellmethod(self):
+ test_object = object()
+ with pytest.raises(ValueError, match="not an iris.coords.CellMethod"):
+ Cube([0], cell_methods=[test_object])
+
+ def test_assign_derivedcellmethod(self):
+ class DerivedCellMethod(CellMethod):
+ pass
+
+ test_object = DerivedCellMethod("mean", "time", "6hr")
+ cms = (test_object,)
+ self.cube.cell_methods = (test_object,)
+ assert cms == self.cube.cell_methods
+
+ def test_fail_assign_duckcellmethod(self):
+ # Can't currently assign a "duck-typed" CellMethod replacement, since
+ # implementation requires class membership (boo!)
+ DuckCellMethod = namedtuple("DuckCellMethod", CellMethod._names)
+ test_object = DuckCellMethod(
+ *CellMethod._names
+ ) # fill props with value==name
+ with pytest.raises(ValueError, match="not an iris.coords.CellMethod"):
+ self.cube.cell_methods = (test_object,)
if __name__ == "__main__":
diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py
index 1ebfe57773..86457d3888 100644
--- a/lib/iris/tests/unit/cube/test_CubeList.py
+++ b/lib/iris/tests/unit/cube/test_CubeList.py
@@ -735,5 +735,36 @@ def test_copy(self):
self.assertIsInstance(self.copied_cube_list, iris.cube.CubeList)
+class TestHtmlRepr:
+ """
+    Confirm that CubeList._repr_html_() creates a fresh
+ :class:`iris.experimental.representation.CubeListRepresentation` object, and uses
+ it in the expected way.
+
+ Notes
+ -----
+ This only tests code connectivity. The functionality is tested elsewhere, at
+ `iris.tests.unit.experimental.representation.test_CubeListRepresentation`
+ """
+
+ @staticmethod
+ def test__repr_html_():
+ test_cubelist = CubeList([])
+
+ target = "iris.experimental.representation.CubeListRepresentation"
+ with mock.patch(target) as class_mock:
+ # Exercise the function-under-test.
+ test_cubelist._repr_html_()
+
+ assert class_mock.call_args_list == [
+ # "CubeListRepresentation()" was called exactly once, with the cubelist as arg
+ mock.call(test_cubelist)
+ ]
+ assert class_mock.return_value.repr_html.call_args_list == [
+ # "CubeListRepresentation(cubelist).repr_html()" was called exactly once, with no args
+ mock.call()
+ ]
+
+
if __name__ == "__main__":
tests.main()
diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py
index 3230e3de00..9e60631c33 100644
--- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py
+++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py
@@ -22,6 +22,7 @@
from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord
from iris.cube import Cube
import iris.exceptions
+from iris.tests.stock import realistic_4d
class Test_aggregated_by(tests.IrisTest):
@@ -841,5 +842,52 @@ def test_clim_in_no_clim_op(self):
self.assertFalse(categorised_coord.climatological)
+class Test_aggregated_by__derived(tests.IrisTest):
+ def setUp(self):
+ self.cube = realistic_4d()[:, :10, :6, :8]
+ self.time_cat_coord = AuxCoord(
+ [0, 0, 1, 1, 2, 2], long_name="time_cat"
+ )
+ self.cube.add_aux_coord(self.time_cat_coord, 0)
+ height_data = np.zeros(self.cube.shape[1])
+ height_data[5:] = 1
+ self.height_cat_coord = AuxCoord(height_data, long_name="height_cat")
+ self.cube.add_aux_coord(self.height_cat_coord, 1)
+ self.aggregator = iris.analysis.MEAN
+
+ def test_grouped_dim(self):
+ """
+ Check that derived coordinates are maintained when the coordinates they
+ derive from are aggregated.
+ """
+ result = self.cube.aggregated_by(
+ self.height_cat_coord,
+ self.aggregator,
+ )
+ assert len(result.aux_factories) == 1
+ altitude = result.coord("altitude")
+ assert altitude.shape == (2, 6, 8)
+
+ # Check the bounds are derived as expected.
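+        # The 10 height levels form two groups of 5, so each aggregated cell
+        # takes its lower bound from the first level of its group (0::5) and
+        # its upper bound from the last (4::5).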
+ orig_alt_bounds = self.cube.coord("altitude").bounds
+ bounds_0 = orig_alt_bounds[0::5, :, :, 0]
+ bounds_1 = orig_alt_bounds[4::5, :, :, 1]
+ expected_bounds = np.stack([bounds_0, bounds_1], axis=-1)
+ assert np.array_equal(expected_bounds, result.coord("altitude").bounds)
+
+ def test_ungrouped_dim(self):
+ """
+ Check that derived coordinates are preserved when aggregating along a
+ different axis.
+ """
+ result = self.cube.aggregated_by(
+ self.time_cat_coord,
+ self.aggregator,
+ )
+ assert len(result.aux_factories) == 1
+ altitude = result.coord("altitude")
+ assert altitude == self.cube.coord("altitude")
+
+
if __name__ == "__main__":
tests.main()
diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py
index ce99a8b4be..03e2793fd9 100644
--- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py
+++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py
@@ -16,9 +16,10 @@
import dask.array as da
import numpy as np
+import pytest
from iris._lazy_data import as_lazy_data, is_lazy_data
-from iris.common.metadata import BaseMetadata
+from iris.common.metadata import BaseMetadata, CoordMetadata
from iris.coords import AuxCoord, Coord
from iris.cube import Cube
from iris.experimental.ugrid.mesh import Connectivity, Mesh, MeshCoord
@@ -45,16 +46,11 @@ def test_derived_properties(self):
# underlying mesh coordinate.
for axis in Mesh.AXES:
meshcoord = sample_meshcoord(axis=axis)
- # N.B.
- node_x_coord = meshcoord.mesh.coord(include_nodes=True, axis=axis)
- for key in node_x_coord.metadata._fields:
+ face_x_coord = meshcoord.mesh.coord(include_faces=True, axis=axis)
+ for key in face_x_coord.metadata._fields:
meshval = getattr(meshcoord, key)
- if key == "var_name":
- # var_name is unused.
- self.assertIsNone(meshval)
- else:
- # names, units and attributes are derived from the node coord.
- self.assertEqual(meshval, getattr(node_x_coord, key))
+ # All relevant attributes are derived from the face coord.
+ self.assertEqual(meshval, getattr(face_x_coord, key))
def test_fail_bad_mesh(self):
with self.assertRaisesRegex(TypeError, "must be a.*Mesh"):
@@ -239,9 +235,7 @@ class Test__getitem__(tests.IrisTest):
def test_slice_wholeslice_1tuple(self):
# The only slicing case that we support, to enable cube slicing.
meshcoord = sample_meshcoord()
- meshcoord2 = meshcoord[
- :,
- ]
+ meshcoord2 = meshcoord[:,]
self.assertIsNot(meshcoord2, meshcoord)
self.assertEqual(meshcoord2, meshcoord)
# In this case, we should *NOT* copy the linked Mesh object.
@@ -270,10 +264,11 @@ def setUp(self):
def _expected_elements_regexp(
self,
standard_name="longitude",
- long_name="long-name",
- attributes=True,
+ long_name=None,
+ attributes=False,
location="face",
axis="x",
+ var_name=None,
):
# Printed name is standard or long -- we don't have a case with neither
coord_name = standard_name or long_name
@@ -282,24 +277,30 @@ def _expected_elements_regexp(
regexp = f"MeshCoord : {coord_name} / [^\n]+\n *"
regexp += r"mesh: \\n *"
regexp += f"location: '{location}'\n *"
+
# Now some optional sections : whichever comes first will match
# arbitrary content leading up to it.
- matched_any_upto = False
- if standard_name:
- regexp += ".*"
- matched_any_upto = True
- regexp += f"standard_name: '{standard_name}'\n *"
- if long_name:
+ matched_upto = False
+
+ def upto_first_expected(regexp, matched_any_upto):
if not matched_any_upto:
regexp += ".*"
matched_any_upto = True
+ return regexp, matched_any_upto
+
+ if standard_name:
+ regexp, matched_upto = upto_first_expected(regexp, matched_upto)
+ regexp += f"standard_name: '{standard_name}'\n *"
+ if long_name:
+ regexp, matched_upto = upto_first_expected(regexp, matched_upto)
regexp += f"long_name: '{long_name}'\n *"
+ if var_name:
+ regexp, matched_upto = upto_first_expected(regexp, matched_upto)
+ regexp += f"var_name: '{var_name}'\n *"
if attributes:
# if we expected attributes, they should come next
# TODO: change this when each attribute goes on a new line
- if not matched_any_upto:
- regexp += ".*"
- matched_any_upto = True
+ regexp, matched_upto = upto_first_expected(regexp, matched_upto)
# match 'attributes:' followed by N*lines with larger indent
regexp += "attributes:(\n [^ \n]+ +[^ \n]+)+\n "
# After those items, expect 'axis' next
@@ -314,7 +315,7 @@ def test_repr(self):
# A simple check for the condensed form.
result = repr(self.meshcoord)
expected = (
- ""
)
self.assertEqual(expected, result)
@@ -331,7 +332,7 @@ def test_repr_lazy(self):
self.assertTrue(self.meshcoord.has_lazy_bounds())
expected = (
- "+bounds shape(3,)>"
)
self.assertEqual(expected, result)
@@ -342,7 +343,7 @@ def test_repr__nameless_mesh(self):
assert self.mesh.name() == "unknown"
result = repr(self.meshcoord)
re_expected = (
- r".MeshCoord: longitude / \(degrees_east\) "
+ r".MeshCoord: longitude / \(unknown\) "
r"mesh\(.Mesh object at 0x[^>]+.\) location\(face\) "
)
self.assertRegex(result, re_expected)
@@ -392,18 +393,6 @@ def test_str_no_long_name(self):
re_expected = self._expected_elements_regexp(long_name=False)
self.assertRegex(result, re_expected)
- def test_str_no_standard_name(self):
- mesh = self.mesh
- # Remove the standard_name of the node coord in the mesh.
- node_coord = mesh.coord(include_nodes=True, axis="x")
- node_coord.standard_name = None
- node_coord.axis = "x" # This is required : but it's a kludge !!
- # Make a new meshcoord, based on the modified mesh.
- meshcoord = sample_meshcoord(mesh=self.mesh)
- result = str(meshcoord)
- re_expected = self._expected_elements_regexp(standard_name=False)
- self.assertRegex(result, re_expected)
-
def test_str_no_attributes(self):
mesh = self.mesh
# No attributes on the node coord in the mesh.
@@ -451,9 +440,11 @@ def test_cube_dims(self):
def test_find_by_name(self):
meshcoord = self.meshcoord
+ # hack to give it a long name
+ meshcoord.long_name = "odd_case"
cube = self.cube
self.assertIs(cube.coord(standard_name="longitude"), meshcoord)
- self.assertIs(cube.coord(long_name="long-name"), meshcoord)
+ self.assertIs(cube.coord(long_name="odd_case"), meshcoord)
def test_find_by_axis(self):
meshcoord = self.meshcoord
@@ -796,5 +787,157 @@ def test_bounds_badvalues__lazy(self):
self._check_bounds_bad_index_values(lazy=True)
+class Test__metadata:
+ def setup_mesh(self, location, axis):
+ # Create a standard test mesh + attach it to the test instance.
+ mesh = sample_mesh()
+
+ # Modify the metadata of specific coordinates used in this test.
+ def select_coord(location, axis):
+ kwargs = {f"include_{location}s": True, "axis": axis}
+ return mesh.coord(**kwargs)
+
+ node_coord = select_coord("node", axis)
+ location_coord = select_coord(location, axis)
+ for i_place, coord in enumerate((node_coord, location_coord)):
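+            # Give node and face/edge coords the same standard_name and units,
+            # but distinct long_name, var_name and attributes.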
+ coord.standard_name = "longitude" if axis == "x" else "latitude"
+ coord.units = "degrees"
+ coord.long_name = f"long_name_{i_place}"
+ coord.var_name = f"var_name_{i_place}"
+ coord.attributes = {"att": i_place}
+
+ # attach all the relevant testcase context to the test instance.
+ self.mesh = mesh
+ self.location = location
+ self.axis = axis
+ self.location_coord = location_coord
+ self.node_coord = node_coord
+
+ def coord_metadata_matches(self, test_coord, ref_coord):
+ # Check that two coords match, in all the basic Coord identity/phenomenon
+ # metadata fields -- so it works even between coords of different subclasses.
+ for key in CoordMetadata._fields:
+ assert getattr(test_coord, key) == getattr(ref_coord, key)
+
+ @pytest.fixture(params=["face", "edge"])
+ def location_face_or_edge(self, request):
+ # Fixture to parametrise over location = face/edge
+ return request.param
+
+ @pytest.fixture(params=["x", "y"])
+ def axis_x_or_y(self, request):
+ # Fixture to parametrise over axis = X/Y
+ return request.param
+
+ def test_node_meshcoord(self, axis_x_or_y):
+ # MeshCoord metadata matches that of the relevant node coord.
+ self.setup_mesh(location="node", axis=axis_x_or_y)
+ meshcoord = self.mesh.to_MeshCoord(
+ location=self.location, axis=self.axis
+ )
+ self.coord_metadata_matches(meshcoord, self.node_coord)
+
+ def test_faceedge_basic(self, location_face_or_edge, axis_x_or_y):
+ # MeshCoord metadata matches that of the face/edge ("points") coord.
+ self.setup_mesh(location_face_or_edge, axis_x_or_y)
+ meshcoord = self.mesh.to_MeshCoord(
+ location=self.location, axis=self.axis
+ )
+ self.coord_metadata_matches(meshcoord, self.location_coord)
+
+ @pytest.mark.parametrize(
+ "fieldname", ["long_name", "var_name", "attributes"]
+ )
+ def test_faceedge_dontcare_fields(
+ self, location_face_or_edge, axis_x_or_y, fieldname
+ ):
+ # Check that it's ok for the face/edge and node coords to have different
+ # long-name, var-name or attributes.
+ self.setup_mesh(location_face_or_edge, axis_x_or_y)
+ if fieldname == "attributes":
+ different_value = {"myattrib": "different attributes"}
+ else:
+ # others are just arbitrary strings.
+ different_value = "different"
+ setattr(self.location_coord, fieldname, different_value)
+ # Mostly.. just check this does not cause an error, as it would do if we
+ # modified "standard_name" or "units" (see other tests) ...
+ meshcoord = self.mesh.to_MeshCoord(
+ location=self.location, axis=self.axis
+ )
+ # ... but also, check that the result matches the expected face/edge coord.
+ self.coord_metadata_matches(meshcoord, self.location_coord)
+
+ def test_faceedge_fail_mismatched_stdnames(
+ self, location_face_or_edge, axis_x_or_y
+ ):
+ # Different "standard_name" for node and face/edge causes an error.
+ self.setup_mesh(location_face_or_edge, axis_x_or_y)
+ node_name = f"projection_{axis_x_or_y}_coordinate"
+ self.node_coord.standard_name = node_name
+ location_name = "longitude" if axis_x_or_y == "x" else "latitude"
+ msg = (
+ "Node coordinate .*"
+ f"disagrees with the {location_face_or_edge} coordinate .*, "
+ 'in having a "standard_name" value of '
+ f"'{node_name}' instead of '{location_name}'"
+ )
+ with pytest.raises(ValueError, match=msg):
+ self.mesh.to_MeshCoord(
+ location=location_face_or_edge, axis=axis_x_or_y
+ )
+
+ def test_faceedge_fail_missing_stdnames(
+ self, location_face_or_edge, axis_x_or_y
+ ):
+ # "standard_name" compared with None also causes an error.
+ self.setup_mesh(location_face_or_edge, axis_x_or_y)
+ self.node_coord.standard_name = None
+ # N.B. in the absence of a standard-name, we **must** provide an extra ".axis"
+ # property, or the coordinate cannot be correctly identified in the Mesh.
+ # This is a bit of a kludge, but works with current code.
+ self.node_coord.axis = axis_x_or_y
+
+ location_name = "longitude" if axis_x_or_y == "x" else "latitude"
+ msg = (
+ "Node coordinate .*"
+ f"disagrees with the {location_face_or_edge} coordinate .*, "
+ 'in having a "standard_name" value of '
+ f"None instead of '{location_name}'"
+ )
+ with pytest.raises(ValueError, match=msg):
+ self.mesh.to_MeshCoord(
+ location=location_face_or_edge, axis=axis_x_or_y
+ )
+
+ def test_faceedge_fail_mismatched_units(
+ self, location_face_or_edge, axis_x_or_y
+ ):
+ # Different "units" for node and face/edge causes an error.
+ self.setup_mesh(location_face_or_edge, axis_x_or_y)
+ self.node_coord.units = "hPa"
+ msg = (
+ "Node coordinate .*"
+ f"disagrees with the {location_face_or_edge} coordinate .*, "
+ 'in having a "units" value of '
+ "'hPa' instead of 'degrees'"
+ )
+ with pytest.raises(ValueError, match=msg):
+ self.mesh.to_MeshCoord(
+ location=location_face_or_edge, axis=axis_x_or_y
+ )
+
+ def test_faceedge_missing_units(self, location_face_or_edge, axis_x_or_y):
+        # Comparing units with None ("unknown") is not an error.
+ self.setup_mesh(location_face_or_edge, axis_x_or_y)
+ self.node_coord.units = None
+ # This is OK
+ meshcoord = self.mesh.to_MeshCoord(
+ location=self.location, axis=self.axis
+ )
+ # ... but also, check that the result matches the expected face/edge coord.
+ self.coord_metadata_matches(meshcoord, self.location_coord)
+
+
if __name__ == "__main__":
tests.main()
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py
index a8e44747dd..ffe00c8c19 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py
@@ -127,7 +127,6 @@ def _make_testcase_cdl(
include_cellmeasure=False,
include_ancil=False,
):
-
phenom_extra_attrs_string = ""
extra_vars_string = ""
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py
new file mode 100644
index 0000000000..b057a41a3e
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py
@@ -0,0 +1,77 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`.
+
+"""
+
+from unittest import mock
+
+import numpy as np
+import pytest
+
+from iris.exceptions import CannotAddError
+from iris.fileformats._nc_load_rules.helpers import build_ancil_var
+
+
+@pytest.fixture
+def mock_engine():
+ return mock.Mock(
+ cube=mock.Mock(),
+ cf_var=mock.Mock(dimensions=("foo", "bar")),
+ filename="DUMMY",
+ cube_parts=dict(ancillary_variables=[]),
+ )
+
+
+@pytest.fixture
+def mock_cf_av_var(monkeypatch):
+ data = np.arange(6)
+ output = mock.Mock(
+ dimensions=("foo",),
+ scale_factor=1,
+ add_offset=0,
+ cf_name="wibble",
+ cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]),
+ standard_name=None,
+ long_name="wibble",
+ units="m2",
+ shape=data.shape,
+ dtype=data.dtype,
+ __getitem__=lambda self, key: data[key],
+ )
+
+ # Create patch for deferred loading that prevents attempted
+ # file access. This assumes that output is defined in the test case.
+ def patched__getitem__(proxy_self, keys):
+ if proxy_self.variable_name == output.cf_name:
+ return output[keys]
+ raise RuntimeError()
+
+ monkeypatch.setattr(
+ "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__",
+ patched__getitem__,
+ )
+
+ return output
+
+
+def test_not_added(monkeypatch, mock_engine, mock_cf_av_var):
+ # Confirm that the ancillary variable will be skipped if a CannotAddError
+ # is raised when attempting to add.
+ def mock_add_ancillary_variable(_, __):
+ raise CannotAddError("foo")
+
+ with monkeypatch.context() as m:
+ m.setattr(
+ mock_engine.cube,
+ "add_ancillary_variable",
+ mock_add_ancillary_variable,
+ )
+ with pytest.warns(match="ancillary variable not added to Cube: foo"):
+ build_ancil_var(mock_engine, mock_cf_av_var)
+
+ assert mock_engine.cube_parts["ancillary_variables"] == []
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py
index 95f892454b..13622b72e2 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py
@@ -16,8 +16,10 @@
from unittest import mock
import numpy as np
+import pytest
from iris.coords import AuxCoord
+from iris.exceptions import CannotAddError
from iris.fileformats._nc_load_rules.helpers import build_auxiliary_coordinate
from iris.fileformats.cf import CFVariable
@@ -280,6 +282,11 @@ def get_cf_bounds_var(coord_var):
new=get_cf_bounds_var,
)
+ # test_not_added() has been written in pytest-style, but the rest of
+ # the class is pending migration. Defining self.monkeypatch (not the
+ # typical practice in pure pytest) allows this transitional state.
+ self.monkeypatch = pytest.MonkeyPatch()
+
def check_case_aux_coord_construction(self, climatology=False):
# Test a generic auxiliary coordinate, with or without
# a climatological coord.
@@ -305,6 +312,19 @@ def test_aux_coord_construction(self):
def test_aux_coord_construction__climatology(self):
self.check_case_aux_coord_construction(climatology=True)
+ def test_not_added(self):
+ # Confirm that the coord will be skipped if a CannotAddError is raised
+ # when attempting to add.
+ def mock_add_aux_coord(_, __):
+ raise CannotAddError("foo")
+
+ with self.monkeypatch.context() as m:
+ m.setattr(self.engine.cube, "add_aux_coord", mock_add_aux_coord)
+ with pytest.warns(match="coordinate not added to Cube: foo"):
+ build_auxiliary_coordinate(self.engine, self.cf_coord_var)
+
+ assert self.engine.cube_parts["coordinates"] == []
+
if __name__ == "__main__":
tests.main()
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py
new file mode 100644
index 0000000000..efbb0649c9
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py
@@ -0,0 +1,74 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`.
+
+"""
+
+from unittest import mock
+
+import numpy as np
+import pytest
+
+from iris.exceptions import CannotAddError
+from iris.fileformats._nc_load_rules.helpers import build_cell_measures
+
+
+@pytest.fixture
+def mock_engine():
+ return mock.Mock(
+ cube=mock.Mock(),
+ cf_var=mock.Mock(dimensions=("foo", "bar")),
+ filename="DUMMY",
+ cube_parts=dict(cell_measures=[]),
+ )
+
+
+@pytest.fixture
+def mock_cf_cm_var(monkeypatch):
+ data = np.arange(6)
+ output = mock.Mock(
+ dimensions=("foo",),
+ scale_factor=1,
+ add_offset=0,
+ cf_name="wibble",
+ cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]),
+ standard_name=None,
+ long_name="wibble",
+ units="m2",
+ shape=data.shape,
+ dtype=data.dtype,
+ __getitem__=lambda self, key: data[key],
+ cf_measure="area",
+ )
+
+ # Create patch for deferred loading that prevents attempted
+ # file access. This assumes that output is defined in the test case.
+ def patched__getitem__(proxy_self, keys):
+ if proxy_self.variable_name == output.cf_name:
+ return output[keys]
+ raise RuntimeError()
+
+ monkeypatch.setattr(
+ "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__",
+ patched__getitem__,
+ )
+
+ return output
+
+
+def test_not_added(monkeypatch, mock_engine, mock_cf_cm_var):
+ # Confirm that the cell measure will be skipped if a CannotAddError is
+ # raised when attempting to add.
+ def mock_add_cell_measure(_, __):
+ raise CannotAddError("foo")
+
+ with monkeypatch.context() as m:
+ m.setattr(mock_engine.cube, "add_cell_measure", mock_add_cell_measure)
+ with pytest.warns(match="cell measure not added to Cube: foo"):
+ build_cell_measures(mock_engine, mock_cf_cm_var)
+
+ assert mock_engine.cube_parts["cell_measures"] == []
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py
index a75678d923..bc13975441 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py
@@ -17,8 +17,10 @@
import warnings
import numpy as np
+import pytest
from iris.coords import AuxCoord, DimCoord
+from iris.exceptions import CannotAddError
from iris.fileformats._nc_load_rules.helpers import build_dimension_coordinate
@@ -73,6 +75,12 @@ def setUp(self):
)
self.bounds = bounds
+        # test_dimcoord_not_added() and test_auxcoord_not_added() have been
+        # written in pytest-style, but the rest of the class is pending
+        # migration. Defining self.monkeypatch (not the typical practice in
+        # pure pytest) allows this transitional state.
+ self.monkeypatch = pytest.MonkeyPatch()
+
def _set_cf_coord_var(self, points):
self.cf_coord_var = mock.Mock(
dimensions=("foo",),
@@ -221,7 +229,9 @@ def test_aux_coord_construction(self):
warning_patch = mock.patch("warnings.warn")
# Asserts must lie within context manager because of deferred loading.
- with warning_patch, self.deferred_load_patch, self.get_cf_bounds_var_patch:
+ with (
+ warning_patch
+ ), self.deferred_load_patch, self.get_cf_bounds_var_patch:
build_dimension_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
@@ -233,6 +243,40 @@ def test_aux_coord_construction(self):
warnings.warn.call_args[0][0],
)
+ def test_dimcoord_not_added(self):
+ # Confirm that the coord will be skipped if a CannotAddError is raised
+ # when attempting to add.
+ def mock_add_dim_coord(_, __):
+ raise CannotAddError("foo")
+
+ with self.monkeypatch.context() as m:
+ m.setattr(self.engine.cube, "add_dim_coord", mock_add_dim_coord)
+
+ self._set_cf_coord_var(np.arange(6))
+
+ with self.deferred_load_patch, self.get_cf_bounds_var_patch:
+ with pytest.warns(match="coordinate not added to Cube: foo"):
+ build_dimension_coordinate(self.engine, self.cf_coord_var)
+
+ assert self.engine.cube_parts["coordinates"] == []
+
+ def test_auxcoord_not_added(self):
+ # Confirm that a gracefully-created auxiliary coord will also be
+ # skipped if a CannotAddError is raised when attempting to add.
+ def mock_add_aux_coord(_, __):
+ raise CannotAddError("foo")
+
+ with self.monkeypatch.context() as m:
+ m.setattr(self.engine.cube, "add_aux_coord", mock_add_aux_coord)
+
+ self._set_cf_coord_var(np.array([1, 3, 2, 4, 6, 5]))
+
+ with self.deferred_load_patch, self.get_cf_bounds_var_patch:
+ with pytest.warns(match="coordinate not added to Cube: foo"):
+ build_dimension_coordinate(self.engine, self.cf_coord_var)
+
+ assert self.engine.cube_parts["coordinates"] == []
+
class TestBoundsVertexDim(tests.IrisTest, RulesTestMixin):
def setUp(self):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py
index 575c852ece..18e86a9f57 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py
@@ -1082,7 +1082,6 @@ def test_mesh_dim_names(self):
("dim invalid-name &%!", "dim_invalid_name____"),
]
for given_name, expected_name in dim_names_tests:
-
mesh = make_mesh(mesh_kwargs={"face_dimension": given_name})
filepath = self.check_save_mesh(mesh)
diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py
index 5e2bbcaa2c..316894ded1 100644
--- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py
+++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py
@@ -44,7 +44,6 @@
class DummyPPField(PPField):
-
HEADER_DEFN = DUMMY_HEADER
HEADER_DICT = dict(DUMMY_HEADER)
diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py
index 83475c6782..73913c6219 100644
--- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py
+++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py
@@ -17,10 +17,49 @@
import numpy as np
import numpy.ma as ma
+import pytest
import iris.fileformats.pp as pp
+@pytest.mark.parametrize("data_shape", [(2, 3)])
+@pytest.mark.parametrize(
+ "expected_shape", [(2, 3), (3, 2), (1, 3), (2, 2), (3, 3), (2, 4)]
+)
+@pytest.mark.parametrize(
+ "data_type", [np.float32, np.int32, np.int16, np.int8]
+)
+def test_data_padding__no_compression(data_shape, expected_shape, data_type):
+ data = np.empty(data_shape, dtype=data_type)
+
+ # create the field data buffer
+ buffer = io.BytesIO()
+ buffer.write(data)
+ buffer.seek(0)
+ data_bytes = buffer.read()
+
+ lbpack = pp.SplittableInt(0, dict(n1=0, n2=1))
+ boundary_packing = None
+ mdi = -1
+ args = (
+ data_bytes,
+ lbpack,
+ boundary_packing,
+ expected_shape,
+ data_type,
+ mdi,
+ )
+ data_length, expected_length = np.prod(data_shape), np.prod(expected_shape)
+
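+    # If the expected field shape needs no more words than were supplied, the
+    # raw bytes can still be shaped to it; asking for more words than exist is
+    # expected to raise an error instead.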
+ if expected_length <= data_length:
+ result = pp._data_bytes_to_shaped_array(*args)
+ assert result.shape == expected_shape
+ else:
+ emsg = r"data containing \d+ words does not match expected length"
+ with pytest.raises(ValueError, match=emsg):
+ _ = pp._data_bytes_to_shaped_array(*args)
+
+
class Test__data_bytes_to_shaped_array__lateral_boundary_compression(
tests.IrisTest
):
diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py
index 45012dc8bd..8200259cca 100644
--- a/lib/iris/tests/unit/fileformats/pp/test_save.py
+++ b/lib/iris/tests/unit/fileformats/pp/test_save.py
@@ -13,6 +13,8 @@
import cf_units
import cftime
+import numpy as np
+import pytest
from iris.coords import CellMethod, DimCoord
from iris.fileformats._ff_cross_references import STASH_TRANS
@@ -21,6 +23,27 @@
import iris.tests.stock as stock
+@pytest.mark.parametrize(
+ "unit,modulus",
+ [
+ (cf_units.Unit("radians"), 2 * np.pi),
+ (cf_units.Unit("degrees"), 360.0),
+ (None, 360.0),
+ ],
+)
+def test_grid_and_pole__scalar_dim_longitude(unit, modulus):
+ cube = stock.lat_lon_cube()[:, -1:]
+ assert cube.ndim == 2
+ lon = cube.coord("longitude")
+ lon.units = unit
+
+ field = _pp_save_ppfield_values(cube)
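+    # With only a single longitude point there is no spacing to measure, so
+    # the expected grid spacing (bdx) is the coordinate's modulus, and the
+    # zeroth-point origin (bzx) sits one spacing below the single point.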
+ bdx = modulus
+ assert field.bdx == bdx
+ assert field.bzx == (lon.points[0] - bdx)
+ assert field.lbnpt == lon.points.size
+
+
def _pp_save_ppfield_values(cube):
"""
Emulate saving a cube as PP, and capture the resulting PP field values.
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py
index 62eb7ff019..e194e240c6 100644
--- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py
+++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py
@@ -211,7 +211,6 @@ def test_lbcode3x23(self):
class TestLBTIMx2x_ZeroYears(TestField):
-
_spec = [
"lbtim",
"lbcode",
diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py
index 5aeebd6045..5f9dece153 100644
--- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py
+++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py
@@ -68,7 +68,7 @@ def test_chunk_size_limiting(self):
((11, 2, 1011, 1022), (5, 2, 1011, 1022)),
]
err_fmt = "Result of optimising chunks {} was {}, expected {}"
- for (shape, expected) in given_shapes_and_resulting_chunks:
+ for shape, expected in given_shapes_and_resulting_chunks:
chunks = _optimum_chunksize(
shape, shape, limit=self.FIXED_CHUNKSIZE_LIMIT
)
@@ -86,7 +86,7 @@ def test_chunk_size_expanding(self):
((3, 300, 200), (117, 300, 1000), (39, 300, 1000)),
]
err_fmt = "Result of optimising shape={};chunks={} was {}, expected {}"
- for (shape, fullshape, expected) in given_shapes_and_resulting_chunks:
+ for shape, fullshape, expected in given_shapes_and_resulting_chunks:
chunks = _optimum_chunksize(
chunks=shape, shape=fullshape, limit=self.FIXED_CHUNKSIZE_LIMIT
)
diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py
index 31b1efb3fd..0fca726b28 100644
--- a/lib/iris/tests/unit/merge/test_ProtoCube.py
+++ b/lib/iris/tests/unit/merge/test_ProtoCube.py
@@ -77,8 +77,7 @@ def test_error(self):
self.assertTrue(result)
-@tests.iristest_timing_decorator
-class Test_register__match(Mixin_register, tests.IrisTest_nometa):
+class Test_register__match(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return []
@@ -88,8 +87,7 @@ def cube2(self):
return example_cube()
-@tests.iristest_timing_decorator
-class Test_register__standard_name(Mixin_register, tests.IrisTest_nometa):
+class Test_register__standard_name(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.standard_name", "air_temperature", "air_density"]
@@ -101,8 +99,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__long_name(Mixin_register, tests.IrisTest_nometa):
+class Test_register__long_name(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.long_name", "screen_air_temp", "Belling"]
@@ -114,8 +111,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__var_name(Mixin_register, tests.IrisTest_nometa):
+class Test_register__var_name(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.var_name", "'airtemp'", "'airtemp2'"]
@@ -127,8 +123,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__units(Mixin_register, tests.IrisTest_nometa):
+class Test_register__units(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.units", "'K'", "'C'"]
@@ -140,8 +135,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__attributes_unequal(Mixin_register, tests.IrisTest_nometa):
+class Test_register__attributes_unequal(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.attributes", "'mint'"]
@@ -153,10 +147,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__attributes_unequal_array(
- Mixin_register, tests.IrisTest_nometa
-):
+class Test_register__attributes_unequal_array(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.attributes", "'mint'"]
@@ -174,10 +165,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__attributes_superset(
- Mixin_register, tests.IrisTest_nometa
-):
+class Test_register__attributes_superset(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.attributes", "'stuffed'"]
@@ -189,10 +177,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__attributes_multi_diff(
- Mixin_register, tests.IrisTest_nometa
-):
+class Test_register__attributes_multi_diff(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.attributes", "'sam'", "'mint'"]
@@ -215,8 +200,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__cell_method(Mixin_register, tests.IrisTest_nometa):
+class Test_register__cell_method(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.cell_methods"]
@@ -228,8 +212,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__data_shape(Mixin_register, tests.IrisTest_nometa):
+class Test_register__data_shape(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube.shape", "(2,)", "(3,)"]
@@ -241,8 +224,7 @@ def cube2(self):
return cube
-@tests.iristest_timing_decorator
-class Test_register__data_dtype(Mixin_register, tests.IrisTest_nometa):
+class Test_register__data_dtype(Mixin_register, tests.IrisTest):
@property
def fragments(self):
return ["cube data dtype", "int32", "int8"]
@@ -307,7 +289,6 @@ def test_noise(self):
class Test_register__CoordSig_general(_MergeTest, tests.IrisTest):
-
_mergetest_type = "coord"
def setUp(self):
@@ -462,7 +443,6 @@ def test_coord_system(self):
class Test_register__CoordSig_scalar(_MergeTest_coordprops, tests.IrisTest):
-
_mergetest_type = "aux_coords (scalar)"
def setUp(self):
@@ -504,7 +484,6 @@ def test_dims(self):
class Test_register__CoordSig_dim(_MergeTest_coordprops_vect, tests.IrisTest):
-
_mergetest_type = "dim_coords"
_coord_typename = "dim_coord"
@@ -533,7 +512,6 @@ def test_circular(self):
class Test_register__CoordSig_aux(_MergeTest_coordprops_vect, tests.IrisTest):
-
_mergetest_type = "aux_coords (non-scalar)"
_coord_typename = "aux_coord"
diff --git a/lib/iris/tests/unit/pandas/__init__.py b/lib/iris/tests/unit/pandas/__init__.py
new file mode 100644
index 0000000000..103a264839
--- /dev/null
+++ b/lib/iris/tests/unit/pandas/__init__.py
@@ -0,0 +1,6 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for the :mod:`iris.pandas` module."""
diff --git a/lib/iris/tests/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py
similarity index 67%
rename from lib/iris/tests/test_pandas.py
rename to lib/iris/tests/unit/pandas/test_pandas.py
index f47df75def..fd716bd7c9 100644
--- a/lib/iris/tests/test_pandas.py
+++ b/lib/iris/tests/unit/pandas/test_pandas.py
@@ -3,6 +3,7 @@
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
+"""All unit tests for the :mod:`iris.pandas` module."""
# import iris tests first so that some things can be initialised before
# importing anything else
@@ -11,6 +12,7 @@
import copy
import datetime
from termios import IXOFF # noqa: F401
+import warnings
import cf_units
import cftime
@@ -42,7 +44,17 @@
import iris.pandas
+@pytest.fixture
+def activate_pandas_ndim():
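+ # Enable iris.FUTURE.pandas_ndim for the duration of a test, then restore the legacy default.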
+ iris.FUTURE.pandas_ndim = True
+ yield None
+ iris.FUTURE.pandas_ndim = False
+
+
@skip_pandas
+@pytest.mark.filterwarnings(
+ "ignore:.*as_series has been deprecated.*:iris._deprecation.IrisDeprecation"
+)
class TestAsSeries(tests.IrisTest):
"""Test conversion of 1D cubes to Pandas using as_series()"""
@@ -121,7 +133,7 @@ def test_copy_int32_false(self):
assert cube.data[0] == 99
def test_copy_int64_false(self):
- cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo")
+ cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int64), long_name="foo")
series = iris.pandas.as_series(cube, copy=False)
series[0] = 99
assert cube.data[0] == 99
@@ -147,6 +159,9 @@ def test_copy_masked_false(self):
@skip_pandas
+@pytest.mark.filterwarnings(
+ "ignore:You are using legacy 2-dimensional behaviour.*:FutureWarning"
+)
class TestAsDataFrame(tests.IrisTest):
"""Test conversion of 2D cubes to Pandas using as_data_frame()"""
@@ -321,6 +336,363 @@ def test_copy_false_with_cube_view(self):
assert cube.data[0, 0] == 99
+@skip_pandas
+class TestAsDataFrameNDim(tests.IrisTest):
+ """Test conversion of n-dimensional cubes to Pandas using as_data_frame()"""
+
+ @pytest.fixture(autouse=True)
+ def _activate_pandas_ndim(self, activate_pandas_ndim):
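+ # Request the module-level activate_pandas_ndim fixture so every test in this class runs with the flag enabled.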
+ pass
+
+ def test_no_dim_coords(self):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"
+ )
+ expected_dim0 = np.repeat([0, 1], 5)
+ expected_dim1 = np.tile([0, 1, 2, 3, 4], 2)
+ expected_foo = np.arange(0, 10)
+ data_frame = iris.pandas.as_data_frame(cube)
+ self.assertArrayEqual(data_frame.foo.values, expected_foo)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("dim0"), expected_dim0
+ )
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("dim1"), expected_dim1
+ )
+
+ def test_no_x_coord(self):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"
+ )
+ dim0 = DimCoord([10, 11], long_name="bar")
+ cube.add_dim_coord(dim0, 0)
+ expected_bar = np.repeat([10, 11], 5)
+ expected_dim1 = np.tile([0, 1, 2, 3, 4], 2)
+ expected_foo = np.arange(0, 10)
+ data_frame = iris.pandas.as_data_frame(cube)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("bar"), expected_bar
+ )
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("dim1"), expected_dim1
+ )
+
+ def test_no_y_coord(self):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"
+ )
+ dim1 = DimCoord([10, 11, 12, 13, 14], long_name="bar")
+ cube.add_dim_coord(dim1, 1)
+ expected_dim0 = np.repeat([0, 1], 5)
+ expected_bar = np.tile([10, 11, 12, 13, 14], 2)
+ expected_foo = np.arange(0, 10)
+ data_frame = iris.pandas.as_data_frame(cube)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("dim0"), expected_dim0
+ )
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("bar"), expected_bar
+ )
+
+ def test_simple1D(self):
+ cube = Cube(np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), long_name="foo")
+ dim_coord = DimCoord(
+ [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], long_name="bar"
+ )
+ cube.add_dim_coord(dim_coord, 0)
+ expected_bar = np.arange(10, 20)
+ expected_foo = np.arange(0, 10)
+ data_frame = iris.pandas.as_data_frame(cube)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("bar"), expected_bar
+ )
+
+ def test_simple2D(self):
+ cube2d = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"
+ )
+ dim0_coord = DimCoord([15, 16], long_name="milk")
+ dim1_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar")
+ cube2d.add_dim_coord(dim0_coord, 0)
+ cube2d.add_dim_coord(dim1_coord, 1)
+ expected_milk = np.repeat([15, 16], 5)
+ expected_bar = np.tile([10, 11, 12, 13, 14], 2)
+ expected_foo = np.arange(0, 10)
+ data_frame = iris.pandas.as_data_frame(cube2d)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("milk"), expected_milk
+ )
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("bar"), expected_bar
+ )
+
+ def test_simple3D(self):
+ cube3d = Cube(
+ np.array(
+ [
+ [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
+ [[10, 11, 12, 13, 14], [15, 16, 17, 18, 19]],
+ [[20, 21, 22, 23, 24], [25, 26, 27, 28, 29]],
+ ]
+ ),
+ long_name="foo",
+ )
+ dim0_coord = DimCoord([1, 2, 3], long_name="milk")
+ dim1_coord = DimCoord([10, 11], long_name="bar")
+ dim2_coord = DimCoord([20, 21, 22, 23, 24], long_name="kid")
+ cube3d.add_dim_coord(dim0_coord, 0)
+ cube3d.add_dim_coord(dim1_coord, 1)
+ cube3d.add_dim_coord(dim2_coord, 2)
+ expected_milk = np.repeat([1, 2, 3], 10)
+ expected_bar = np.tile(np.repeat([10, 11], 5), 3)
+ expected_kid = np.tile([20, 21, 22, 23, 24], 6)
+ expected_foo = np.arange(0, 30)
+ data_frame = iris.pandas.as_data_frame(cube3d)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("milk"), expected_milk
+ )
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("bar"), expected_bar
+ )
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("kid"), expected_kid
+ )
+
+ def test_copy_false(self):
+ cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo")
+ data_frame = iris.pandas.as_data_frame(cube, copy=False)
+ cube.data[2] = 99
+ assert cube.data[2] == data_frame.foo[2]
+
+ def test_copy_true(self):
+ cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo")
+ data_frame = iris.pandas.as_data_frame(cube, copy=True)
+ cube.data[2] = 99
+ assert cube.data[2] != data_frame.foo[2]
+
+ def test_time_standard(self):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts"
+ )
+ day_offsets = [0, 100.1, 200.2, 300.3, 400.4]
+ time_coord = DimCoord(
+ day_offsets, long_name="time", units="days since 2000-01-01 00:00"
+ )
+ cube.add_dim_coord(time_coord, 1)
+ expected_ts = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
+ expected_time = np.array(
+ [
+ cftime.DatetimeGregorian(
+ 2000, 1, 1, 0, 0, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 4, 10, 2, 24, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 7, 19, 4, 48, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 10, 27, 7, 12, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2001, 2, 4, 9, 36, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 1, 1, 0, 0, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 4, 10, 2, 24, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 7, 19, 4, 48, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2000, 10, 27, 7, 12, 0, 0, has_year_zero=False
+ ),
+ cftime.DatetimeGregorian(
+ 2001, 2, 4, 9, 36, 0, 0, has_year_zero=False
+ ),
+ ],
+ dtype=object,
+ )
+ data_frame = iris.pandas.as_data_frame(cube)
+ self.assertArrayEqual(data_frame.ts, expected_ts)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("time"), expected_time
+ )
+
+ def test_time_360(self):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts"
+ )
+ time_unit = cf_units.Unit(
+ "days since 2000-01-01 00:00", calendar=cf_units.CALENDAR_360_DAY
+ )
+ time_coord = DimCoord(
+ [100.1, 200.2], long_name="time", units=time_unit
+ )
+ cube.add_dim_coord(time_coord, 0)
+ expected_time = np.array(
+ [
+ cftime.Datetime360Day(
+ 2000, 4, 11, 2, 24, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 4, 11, 2, 24, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 4, 11, 2, 24, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 4, 11, 2, 24, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 4, 11, 2, 24, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 7, 21, 4, 48, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 7, 21, 4, 48, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 7, 21, 4, 48, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 7, 21, 4, 48, 0, 0, has_year_zero=True
+ ),
+ cftime.Datetime360Day(
+ 2000, 7, 21, 4, 48, 0, 0, has_year_zero=True
+ ),
+ ],
+ dtype=object,
+ )
+ expected_ts = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
+ data_frame = iris.pandas.as_data_frame(cube)
+ self.assertArrayEqual(data_frame.ts, expected_ts)
+ self.assertArrayEqual(
+ data_frame.index.get_level_values("time"), expected_time
+ )
+
+ def test_aux_coord(self):
+ cube = Cube(np.array([[0, 1], [5, 6]]), long_name="foo")
+ dim0_coord = DimCoord([15, 16], long_name="milk")
+ dim1_coord = DimCoord([10, 11], long_name="bar")
+ aux0_coord = AuxCoord(["fiveteen", "sixteen"], long_name="words0")
+ aux1_coord = AuxCoord(["ten", "eleven"], long_name="words1")
+ cube.add_dim_coord(dim0_coord, 0)
+ cube.add_dim_coord(dim1_coord, 1)
+ cube.add_aux_coord(aux0_coord, 0)
+ cube.add_aux_coord(aux1_coord, 1)
+ expected_foo = np.array([0, 1, 5, 6])
+ expected_words0 = np.repeat(["fiveteen", "sixteen"], 2)
+ expected_words1 = np.tile(["ten", "eleven"], 2)
+ data_frame = iris.pandas.as_data_frame(cube, add_aux_coords=True)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(data_frame.words0, expected_words0)
+ self.assertArrayEqual(data_frame.words1, expected_words1)
+
+ def test_aux_coord2(self):
+ cube = Cube(np.array([[0, 1], [5, 6]]), long_name="foo")
+ dim0_coord = DimCoord([15, 16], long_name="milk")
+ dim1_coord = DimCoord([10, 11], long_name="bar")
+ aux0_coord = AuxCoord(["fiveteen0", "sixteen0"], long_name="words0")
+ aux1_coord = AuxCoord(["fiveteen1", "sixteen1"], long_name="words1")
+ aux2_coord = AuxCoord(["ten", "eleven"], long_name="words2")
+ cube.add_dim_coord(dim0_coord, 0)
+ cube.add_dim_coord(dim1_coord, 1)
+ # Two aux coords associated with dim0
+ cube.add_aux_coord(aux0_coord, 0)
+ cube.add_aux_coord(aux1_coord, 0)
+ # One aux coord associated with dim1
+ cube.add_aux_coord(aux2_coord, 1)
+ expected_foo = np.array([0, 1, 5, 6])
+ expected_words0 = np.repeat(["fiveteen0", "sixteen0"], 2)
+ expected_words1 = np.repeat(["fiveteen1", "sixteen1"], 2)
+ expected_words2 = np.tile(["ten", "eleven"], 2)
+ data_frame = iris.pandas.as_data_frame(cube, add_aux_coords=True)
+ self.assertArrayEqual(data_frame.foo, expected_foo)
+ self.assertArrayEqual(data_frame.words0, expected_words0)
+ self.assertArrayEqual(data_frame.words1, expected_words1)
+ self.assertArrayEqual(data_frame.words2, expected_words2)
+
+ def test_multidim_aux(self):
+ cube = Cube(
+ np.arange(300, 312, 1).reshape([2, 2, 3]),
+ long_name="air_temperature",
+ )
+ dim0_coord = DimCoord([0, 10], long_name="longitude")
+ dim1_coord = DimCoord([25, 35], long_name="latitude")
+ dim2_coord = DimCoord([0, 100, 200], long_name="height")
+ aux0_coord = AuxCoord(
+ [[True, False], [False, False]], long_name="in_region"
+ )
+ cube.add_dim_coord(dim0_coord, 0)
+ cube.add_dim_coord(dim1_coord, 1)
+ cube.add_dim_coord(dim2_coord, 2)
+ cube.add_aux_coord(aux0_coord, data_dims=(0, 1))
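+ # The 2x2 aux coord spans dims 0 and 1, so each of its values repeats across the 3 heights on dim 2.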
+ expected_in_region = np.repeat([True, False, False, False], 3)
+ data_frame = iris.pandas.as_data_frame(cube, add_aux_coords=True)
+ self.assertArrayEqual(data_frame.in_region, expected_in_region)
+
+ def test_add_scalar_coord(self):
+ cube = Cube(np.array([[0, 1], [5, 6]]), long_name="foo")
+ scalar_coord = iris.coords.AuxCoord(
+ 1, long_name="scalar_coord", units="no_unit"
+ )
+ cube.add_aux_coord(scalar_coord)
+ expected_scalar_coord = np.repeat(1, 4)
+ data_frame = iris.pandas.as_data_frame(cube, add_aux_coords=True)
+ self.assertArrayEqual(data_frame.scalar_coord, expected_scalar_coord)
+
+ def test_add_ancillary_variable(self):
+ cube = Cube(np.array([[0, 1], [5, 6]]), long_name="foo")
+ dim0_coord = DimCoord([0, 10], long_name="bar")
+ cube.add_dim_coord(dim0_coord, 0)
+ av = AncillaryVariable([10, 100], long_name="ancil_bar")
+ av2 = AncillaryVariable(
+ [1000], long_name="ancil_bar2"
+ ) # Scalar ancillary variable
+ cube.add_ancillary_variable(av, 0)
+ cube.add_ancillary_variable(av2)
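+ # ancil_bar maps to dim 0, so each value repeats over the 2 columns; the scalar ancil_bar2 broadcasts to all 4 cells.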
+ expected_ancillary_variable = np.repeat([10, 100], 2)
+ expected_ancillary_variable2 = np.repeat([1000], 4)
+ data_frame = iris.pandas.as_data_frame(
+ cube, add_ancillary_variables=True
+ )
+ self.assertArrayEqual(
+ data_frame.ancil_bar, expected_ancillary_variable
+ )
+ self.assertArrayEqual(
+ data_frame.ancil_bar2, expected_ancillary_variable2
+ )
+
+ def test_add_cell_measures(self):
+ cube = Cube(np.array([[0, 1], [5, 6]]), long_name="foo")
+ dim0_coord = DimCoord([0, 10], long_name="bar")
+ cube.add_dim_coord(dim0_coord, 0)
+ cm = CellMeasure([10, 100], long_name="cell_measure")
+ cm2 = CellMeasure(
+ 1e4, long_name="cell_measure2"
+ ) # Scalar cell measure
+ cube.add_cell_measure(cm, 0)
+ cube.add_cell_measure(cm2)
+ expected_cell_measure = np.repeat([10, 100], 2)
+ expected_cell_measure2 = np.repeat(1e4, 4)
+ data_frame = iris.pandas.as_data_frame(cube, add_cell_measures=True)
+ self.assertArrayEqual(data_frame.cell_measure, expected_cell_measure)
+ self.assertArrayEqual(data_frame.cell_measure2, expected_cell_measure2)
+
+ def test_instance_error(self):
+ with pytest.raises(TypeError):
+ _ = iris.pandas.as_data_frame(list())
+
+
@skip_pandas
@pytest.mark.filterwarnings(
"ignore:.*as_cube has been deprecated.*:iris._deprecation.IrisDeprecation"
@@ -512,15 +884,37 @@ def test_copy_false(self):
@skip_pandas
-class TestFutureAndDeprecation(tests.IrisTest):
- def test_deprecation_warning(self):
+class TestFutureAndDeprecation:
+ def test_as_cube_deprecation_warning(self):
data_frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]])
with pytest.warns(
IrisDeprecation, match="as_cube has been deprecated"
):
_ = iris.pandas.as_cube(data_frame)
- # Tests for FUTURE are expected when as_dataframe() is made n-dimensional.
+ def test_as_series_deprecation_warning(self):
+ cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo")
+ with pytest.warns(
+ IrisDeprecation, match="as_series has been deprecated"
+ ):
+ _ = iris.pandas.as_series(cube)
+
+ def test_as_dataframe_future_warning(self):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"
+ )
+ with pytest.warns(
+ FutureWarning, match="You are using legacy 2-dimensional behaviour"
+ ):
+ _ = iris.pandas.as_data_frame(cube)
+
+ def test_as_dataframe_no_future_warning(self, activate_pandas_ndim):
+ cube = Cube(
+ np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"
+ )
+ with warnings.catch_warnings():
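+ # Escalate FutureWarning to an error so the test fails if the legacy-behaviour warning is still raised.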
+ warnings.simplefilter("error", FutureWarning)
+ _ = iris.pandas.as_data_frame(cube)
@skip_pandas
diff --git a/lib/iris/tests/unit/tests/test_IrisTest.py b/lib/iris/tests/unit/tests/test_IrisTest.py
index 5725b59d40..10de2a7760 100644
--- a/lib/iris/tests/unit/tests/test_IrisTest.py
+++ b/lib/iris/tests/unit/tests/test_IrisTest.py
@@ -66,8 +66,7 @@ def test_different_mask_nonstrict(self):
self._func(self.arr1, arr2, strict=False)
-@tests.iristest_timing_decorator
-class Test_assertMaskedArrayEqual(_MaskedArrayEquality, tests.IrisTest_nometa):
+class Test_assertMaskedArrayEqual(_MaskedArrayEquality, tests.IrisTest):
@property
def _func(self):
return self.assertMaskedArrayEqual
@@ -114,10 +113,7 @@ def test_masked_nonmasked_same_emptymask(self):
self.assertMaskedArrayEqual(arr1, arr2)
-@tests.iristest_timing_decorator
-class Test_assertMaskedArrayAlmostEqual(
- _MaskedArrayEquality, tests.IrisTest_nometa
-):
+class Test_assertMaskedArrayAlmostEqual(_MaskedArrayEquality, tests.IrisTest):
@property
def _func(self):
return self.assertMaskedArrayAlmostEqual
diff --git a/lib/iris/util.py b/lib/iris/util.py
index 3d82ea68c5..9e0db9e66e 100644
--- a/lib/iris/util.py
+++ b/lib/iris/util.py
@@ -735,7 +735,6 @@ def _build_full_slice_given_keys(keys, ndim):
for i, key in enumerate(keys):
if key is Ellipsis:
-
# replace any subsequent Ellipsis objects in keys with
# slice(None, None) as per Numpy
keys = keys[:i] + tuple(
@@ -1815,8 +1814,9 @@ def _mask_array(array, points_to_mask, in_place=False):
If array is lazy then in_place is ignored: _math_op_common will use the
returned value regardless of in_place, so we do not need to implement it
- here. If in_place is True then array must be a np.ma.MaskedArray or dask
- array (must be a dask array if points_to_mask is lazy).
+ here. If in_place is True then array must be a
+ :class:`numpy.ma.MaskedArray` or :class:`dask.array.Array`
+ (must be a dask array if points_to_mask is lazy).
"""
# Decide which array library to use.
@@ -1978,7 +1978,7 @@ def is_masked(array):
Parameters
----------
- array : :class:`numpy.Array` or `dask.array.Array`
+ array : :class:`numpy.ndarray` or :class:`dask.array.Array`
The array to be checked for masks.
Returns
diff --git a/requirements/ci/nox.lock/py310-linux-64.lock b/requirements/ci/nox.lock/py310-linux-64.lock
index b73b8af3da..910a390493 100644
--- a/requirements/ci/nox.lock/py310-linux-64.lock
+++ b/requirements/ci/nox.lock/py310-linux-64.lock
@@ -1,60 +1,65 @@
# Generated by conda-lock.
# platform: linux-64
-# input_hash: 9bcbc5c76124fc238f88ac16184aebeb8fac11fe9d4df03e70a7f50e2d24aa9f
+# input_hash: 234b47d943728b5abe70fba0fd74c6adc10e4f1e2a14b919344f8a693b5b3e6f
@EXPLICIT
https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81
-https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.9.14-ha878542_0.tar.bz2#87c986dab320658abaf3e701406b665c
+https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45
https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5
-https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.1.0-hdcd56e2_16.tar.bz2#b02605b875559ff99f04351fd5040760
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2#6f5ba041a41eb102a1027d9e68731be7
+https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60
https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf
-https://conda.anaconda.org/conda-forge/noarch/tzdata-2022c-h191b570_0.tar.bz2#a56386ad31a7322940dd7d03fb3a9979
+https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853
+https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda#51fc4fcfb19f5d95ffc8c339db5068e8
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.1.0-h69a702a_16.tar.bz2#6bf15e29a20f614b18ae89368260d0a2
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.1.0-h8d9b700_16.tar.bz2#f013cf7749536ce43d82afbffdf499ab
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373
https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab
-https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.1.0-h8d9b700_16.tar.bz2#4f05bc9844f7c101e6e147dab3c88d5c
-https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.7.2-h166bdaf_0.tar.bz2#4a826cd983be6c8fff07a64b6d2079e7
+https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535
+https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f
https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00
https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54
https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a
-https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.9-h27087fc_0.tar.bz2#493ac8b2503a949aebe33d99ea0c284f
-https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_105.tar.bz2#9d3e01547ba04a57372beee01158096f
+https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad
+https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567
https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8
-https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.0-h27087fc_0.tar.bz2#a583d0bc9a85c48e8b07a588d1ac8a80
-https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h27087fc_1009.tar.bz2#17f91dc8bb7a259b02be5bfb2cd2395f
+https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55
+https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37
https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d
https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f
https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed
-https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268
+https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3
+https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51
https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_7.tar.bz2#f82dc1c78bcf73583f2656433ce2933c
+https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4
https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd
-https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.14-h166bdaf_0.tar.bz2#fc84a0446e4e4fb882e78d786cfb9734
+https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.17-h0b41bf4_0.conda#5cc781fd91968b11a8a7fdbee0982676
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3
https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3
-https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211
+https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d
https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206
https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680
https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35
https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f
-https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee
-https://conda.anaconda.org/conda-forge/linux-64/libudev1-249-h166bdaf_4.tar.bz2#dc075ff6fcb46b3d3c7652e543d5f334
+https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52
+https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5
https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37
-https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.12-h166bdaf_3.tar.bz2#29b2d63b0e21b765da0418bc452538c9
-https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.2-h846660c_100.tar.bz2#36a36fe04b932d4b327e7e81c5c43696
+https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41
+https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0
+https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.2-hcb278e6_0.conda#08efb1e1813f1a151b7a945b972a049b
+https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238
-https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e
-https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1q-h166bdaf_0.tar.bz2#07acc367c7fc8b716770cd5b36d31717
-https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa
+https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.8-h0b41bf4_0.conda#e043403cd18faf815bf7705ab6c1e092
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036
https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a
@@ -64,196 +69,201 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.t
https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534
https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98
https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15
-https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605
+https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0
https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae
-https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_4.tar.bz2#dd3e1941dd06f64cb88647d2f7ff8aaa
+https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.22-h11f4161_0.conda#504fa9e712b99494a9cf4630e3ca7d78
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_7.tar.bz2#37a460703214d0d1b421e2a47eb5e6d0
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_7.tar.bz2#785a9296ea478eb78c47593c4da6550f
-https://conda.anaconda.org/conda-forge/linux-64/libcap-2.65-ha37c62d_0.tar.bz2#2c1c43f5442731b58e070bcee45a86ec
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25
+https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1
-https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336
-https://conda.anaconda.org/conda-forge/linux-64/libflac-1.3.4-h27087fc_0.tar.bz2#620e52e160fd09eb8772dedd46bb19ef
-https://conda.anaconda.org/conda-forge/linux-64/libglib-2.72.1-h2d90d5f_0.tar.bz2#ebeadbb5fbc44052eeb6f96a2136e3c2
-https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-he0ac6c6_0.tar.bz2#f5759f0c80708fbf9c4836c0cb46d0fe
-https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.47.0-hdcd2b5c_1.tar.bz2#6fe9e31c2b8d0b022626ccac13e6ca3c
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.38-h753d276_0.tar.bz2#575078de1d3a3114b3ce131bd1508d0c
-https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.39.3-h753d276_0.tar.bz2#ccb2457c73609f2622b8a4b3e42e5d8b
-https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-haa6b8db_3.tar.bz2#89acee135f0809a18a1f4537390aa2dd
+https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd
+https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0
+https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb
+https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e
+https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416
+https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f
+https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906
https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904
-https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.14-h22db469_4.tar.bz2#aced7c1f4b4dbfea08e033c6ae97c53e
-https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc869a4a_1.tar.bz2#7a268cf1386d271e576e35ae82149ef2
-https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.30-haf5c9bc_1.tar.bz2#62b588b2a313ac3d9c2ead767baa3b5d
-https://conda.anaconda.org/conda-forge/linux-64/portaudio-19.6.0-h8e90077_6.tar.bz2#2935b98de57e1f261ef8253655a8eb80
+https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8
+https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf
+https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-ha901b37_0.conda#6a39818710235826181e104aada40c75
+https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b
https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa
https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867
https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3
-https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.12-h166bdaf_3.tar.bz2#76c717057865201aa2d24b79315645bb
-https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74
-https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685
-https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_7.tar.bz2#1699c1211d56a23c66047524cd76796e
-https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
-https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_0.tar.bz2#4e54cbfc47b8c74c2ecc1e7730d8edce
-https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.72.1-h6239696_0.tar.bz2#a3a99cc33279091262bbc4f5ee7c4571
-https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363
-https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.3-h3790be6_0.tar.bz2#7d862b05445123144bec92cb1acc8ef8
+https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295
+https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4
+https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06
+https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336
+https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719
-https://conda.anaconda.org/conda-forge/linux-64/libclang13-14.0.6-default_h3a83d3e_0.tar.bz2#cdbd49e0ab5c5a6c522acb8271977d4c
+https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9
+https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe
https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad
-https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.0.31-h9c3ff4c_1.tar.bz2#fc4b6d93da04731db7601f2a1b1dc96a
-https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.4.0-h55922b4_4.tar.bz2#901791f0ec7cddc8714e76e273013a91
-https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b
-https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.30-h28c427c_1.tar.bz2#0bd292db365c83624316efc2764d9f16
-https://conda.anaconda.org/conda-forge/linux-64/python-3.10.6-h582c2e5_0_cpython.tar.bz2#6f009f92084e84884d1dff862b85eb00
-https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.39.3-h4ff8645_0.tar.bz2#f03cf4ec974e32b6c5d349f62637e36e
+https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-hadd5161_0.conda#70cbb0c2033665f2a7339bf0ec51a67f
+https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7
+https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h6adf6a1_2.conda#2e648a34072eb39d7c4fc2a9981c5f0c
+https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_0.conda#3f67368c9b0e77a693acad193310baf1
+https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_0.conda#b05d7ea8b76f1172d5fe4f30e03277ea
+https://conda.anaconda.org/conda-forge/linux-64/nss-3.88-he45b914_0.conda#d7a81dfb99ad8fbb88872fb7ec646e6c
+https://conda.anaconda.org/conda-forge/linux-64/python-3.10.9-he550d4f_0_cpython.conda#3cb3e91b3fe66baa68a12c85f39b9b40
+https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790
-https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0
-https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2#6d3ccbc56256204925bfa8378722792f
-https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_7.tar.bz2#3889dec08a472eb0f423e5609c76bde1
-https://conda.anaconda.org/conda-forge/noarch/certifi-2022.9.14-pyhd8ed1ab_0.tar.bz2#963e8ceccba45b5cf15f33906d5a20a1
+https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e
+https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31
+https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b
+https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b
+https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9
+https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418
+https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e
-https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2#a6cf47b09786423200d7982d1faa19eb
-https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2#c267da48ce208905d7d976d49dfd9433
+https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf
+https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16
+https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb
-https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.5-pyhd8ed1ab_0.tar.bz2#f15c3912378a07726093cc94d1e13251
+https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
+https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7
+https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py310hff52083_3.tar.bz2#785160da087cf1d70e989afbb761f01c
+https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a
https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2
-https://conda.anaconda.org/conda-forge/noarch/filelock-3.8.0-pyhd8ed1ab_0.tar.bz2#10f0218dbd493ab2e5dc6759ddea4526
-https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.0-hc2a2eb6_1.tar.bz2#139ace7da04f011abbd531cb2a9840ee
-https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.8.2-pyhd8ed1ab_0.tar.bz2#140dc6615896e7d4be1059a63370be93
-https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.8-hff1cb4f_1.tar.bz2#a61c6312192e7c9de71548a6706a21e6
-https://conda.anaconda.org/conda-forge/linux-64/glib-2.72.1-h6239696_0.tar.bz2#1698b7684d3c6a4d1de2ab946f5b0fb5
+https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506
+https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda#44f6828b8f7cc3433d68d1d1c0e9add2
+https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815
+https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed
https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352
-https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905
+https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9
-https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.18-h8c3723f_1003.tar.bz2#9cb956b6605cfc7d8ee1b15e96bd88ba
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f
-https://conda.anaconda.org/conda-forge/linux-64/libclang-14.0.6-default_h2e3cab8_0.tar.bz2#eb70548da697e50cefa7ba939d57d001
-https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h3e49a29_2.tar.bz2#3b88f1d0fe2580594d58d7e44d664617
-https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.83.1-h7bff187_0.tar.bz2#d0c278476dba3b29ee13203784672ab1
-https://conda.anaconda.org/conda-forge/linux-64/libpq-14.5-hd77ab85_0.tar.bz2#d3126b425a04ed2360da1e651cef1b2d
-https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h522a892_0.tar.bz2#802e43f480122a85ae6a34c1909f8f98
+https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723
+https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97
+https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77
+https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f
+https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.88.0-hdc1c0ab_0.conda#c44acb3847ff118c068b662aff858afd
+https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654b17eccaba55b8581d6b9c77f53cc
+https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5
+https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4
+https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py310h1fa729e_0.conda#a1f0db6709778b77b5903541eeac4032
+https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py310h37cc914_0.tar.bz2#98d598d9178d7f3091212c61c0be693c
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19
-https://conda.anaconda.org/conda-forge/linux-64/nss-3.78-h2350873_0.tar.bz2#ab3df39f96742e6f1a9878b09274c1dc
-https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h7d73246_1.tar.bz2#a11b4df9271a8d7917686725aa04c8f2
-https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.5.2-pyhd8ed1ab_1.tar.bz2#2fb3f88922e7aec26ba652fcdfe13950
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605
+https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea
+https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda#1ff2e3ca41f0ce16afec7190db28288b
+https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9
https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727
-https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054
+https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py310h5764c6d_0.tar.bz2#c3c55664e9becc48e6a652e2b641961f
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025
-https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-2_cp310.tar.bz2#9e7160cd0d865e98f6803f1fe15c8b61
-https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2#974bca71d00364630f63f31fa7e059cb
-https://conda.anaconda.org/conda-forge/noarch/setuptools-65.3.0-pyhd8ed1ab_1.tar.bz2#a64c8af7be7a6348c1d9e530f88fa4da
+https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b
+https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda#f59d49a7b464901cf714b9e7984d01a2
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597
+https://conda.anaconda.org/conda-forge/noarch/setuptools-67.3.2-pyhd8ed1ab_0.conda#543af74c4042aee5702a033e03a216d0
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e
https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36
-https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2#a9d85960bc62d53cc4ea0d1d27f73c98
-https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022
+https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py310h5764c6d_1.tar.bz2#be4a201ac582c11d89ed7d15b3157cc3
+https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a
+https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
-https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.1-pyhd8ed1ab_0.tar.bz2#a3508a0c850745b875de88aea4c40cc5
-https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31
-https://conda.anaconda.org/conda-forge/noarch/babel-2.10.3-pyhd8ed1ab_0.tar.bz2#72f1c6d03109d7a70087bc1d029a8eda
-https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d
+https://conda.anaconda.org/conda-forge/noarch/zipp-3.13.0-pyhd8ed1ab_0.conda#41b09d997939e83b231c4557a90c3b13
+https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba
+https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.2-pyha770c72_0.conda#88b59f6989f0ed5ab3433af0b82555e1
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0
-https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_0.tar.bz2#3e4b55b02998782f8ca9ceaaa4f5ada9
-https://conda.anaconda.org/conda-forge/linux-64/curl-7.83.1-h7bff187_0.tar.bz2#ba33b9995f5e691e4f439422d6efafc7
-https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py310hff52083_2.tar.bz2#1cdb74e021e4e0b703a8c2f7cc57d798
-https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.20.3-hd4edc92_2.tar.bz2#153cfb02fb8be7dd7cabcbcb58a63053
-https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h08b82f9_0.tar.bz2#de601caacbaa828d845f758e07e3b85e
-https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.11.4-py310hff52083_0.tar.bz2#8ea386e64531f1ecf4a5765181579e7e
-https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_0.tar.bz2#8dc3e2dce8fa122f8df4f3739d1f771b
-https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h18fbbfe_3.tar.bz2#ea9758cf553476ddf75c789fdd239dc5
-https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py310h5764c6d_1.tar.bz2#ec5a727504409ad1380fc2a84f83d002
-https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py310h37cc914_2.tar.bz2#0211369f253eedce9e570b4f0e5a981a
+https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835
+https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b
+https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6
+https://conda.anaconda.org/conda-forge/linux-64/curl-7.88.0-hdc1c0ab_0.conda#5d9ac94ee84305ada32c3d287d0ec602
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py310h5764c6d_1.tar.bz2#12ebe92a8a578bc903bd844744f4d040
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4
+https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0
+https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62
+https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
+https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d
+https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572
+https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c
-https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.3-py310h53a5b5f_0.tar.bz2#0a60ccaed9ad236cc7463322fe742eb6
-https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85
https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54
-https://conda.anaconda.org/conda-forge/linux-64/pillow-9.2.0-py310hbd86126_2.tar.bz2#443272de4234f6df4a78f50105edc741
-https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2#0b43abe4d3ee93e82742d37def53a836
-https://conda.anaconda.org/conda-forge/linux-64/pluggy-1.0.0-py310hff52083_3.tar.bz2#97f9a22577338f91a94dfac5c1a65a50
+https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py310h023d228_1.conda#bbea829b541aa15df5c65bd40b8c1981
+https://conda.anaconda.org/conda-forge/noarch/pip-23.0.1-pyhd8ed1ab_0.conda#8025ca83b8ba5430b640b83917c2a6f7
https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364
-https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.0-h93bde94_0.tar.bz2#255c7204dda39747c3ba380d28b026d7
-https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.2-py310h5764c6d_0.tar.bz2#6ac13c26fe4f9d8d6b38657664c37fd3
-https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-14.0-h0868958_9.tar.bz2#5bca71f0cf9b86ec58dd9d6216a3ffaf
-https://conda.anaconda.org/conda-forge/noarch/pygments-2.13.0-pyhd8ed1ab_0.tar.bz2#9f478e8eedd301008b5f395bad0caaed
+https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.1-h8ffa02c_2.conda#c264aea0e16bba26afa0a0940e954492
+https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-ha8d29e2_1.conda#dbfc2a8d63a43a11acf4c704e1ef9d0c
+https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.1-pyhd8ed1ab_0.conda#f0be05afc9c9ab45e273c088e00c258b
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984
-https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.0.0-py310h5764c6d_1.tar.bz2#b6f54b7c4177a745d5e6e4319282253a
-https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_4.tar.bz2#505dcf6be997e732d7a33831950dc3cf
-https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py310h5764c6d_0.tar.bz2#c42dcb37acd84b3ca197f03f57ef927d
-https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.3.0-hd8ed1ab_0.tar.bz2#f3e98e944832fb271a0dbda7b7771dc6
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py310h5764c6d_1.tar.bz2#791689ce9e578e2e83b635974af61743
-https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.16.5-py310hff52083_0.tar.bz2#e572565848d8d19e74983f4d122734a8
-https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py310h5764c6d_1004.tar.bz2#6499bb11b7feffb63b26847fc9181319
-https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_0.tar.bz2#6290f1bc763ed75a42aaea29384f9858
-https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.5-py310hbf28c38_0.tar.bz2#85565efb2bf44e8a5782e7c418d30cfe
-https://conda.anaconda.org/conda-forge/linux-64/cryptography-37.0.4-py310h597c629_0.tar.bz2#f285746449d16d92884f4ce0cfe26679
-https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.9.1-pyhd8ed1ab_0.tar.bz2#68bb7f24f75b9691c42fd50e178749f5
-https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.37.3-py310h5764c6d_0.tar.bz2#e12fa8a9fee03765d98a93234ef5a901
-https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.20.3-h57caac4_2.tar.bz2#58838c4ca7d1a5948f5cdcbb8170d753
-https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-5.2.0-hf9f4e7c_0.tar.bz2#3c5f4fbd64c7254fbe246ca9d87863b6
-https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
-https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h06c54e2_4.tar.bz2#491803a7356c6a668a84d71f491c4014
-https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1007.tar.bz2#c2ec7c118184ddfd855fc3698d1c8e63
-https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.0-py310h769672d_0.tar.bz2#06efc4b5f4b418b78de14d1db4a65cad
-https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.0-py310hb1338dc_1.tar.bz2#0ad6207e9d553c67984a5b0b06bbd2a3
-https://conda.anaconda.org/conda-forge/linux-64/pytest-7.1.3-py310hff52083_0.tar.bz2#18ef27d620d67af2feef22acfd42cf4a
-https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py310hde88566_2.tar.bz2#a282f30e2e1efa1f210817597e144762
-https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py310hde88566_1.tar.bz2#cbfce984f85c64401e3d4fedf4bc4247
-https://conda.anaconda.org/conda-forge/linux-64/scipy-1.9.1-py310hdfbd76f_0.tar.bz2#bfb55d07ad9d15d2f2f8e59afcbcf578
-https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.0.5-pyhd8ed1ab_0.tar.bz2#743074b7a216807886f7e8f6d497cceb
-https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.4-py310h5e49deb_0.tar.bz2#2f2c225d04e99ff99d6d3a86692ce968
-https://conda.anaconda.org/conda-forge/linux-64/sip-6.6.2-py310hd8f1fbe_0.tar.bz2#3d311837eadeb8137fca02bdb5a9751f
+https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py310hde88566_3.tar.bz2#0b686f306a76fba9a61e7019f854321f
+https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2
+https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h8b84c32_0.conda#965113c401c7dc9b7a4cd5f9af57e185
+https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.7-py310heca2aa9_0.conda#142c074701cf90c88667b461678aee81
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026
+https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py310h5764c6d_1005.tar.bz2#87669c3468dff637bbd0363bc0f895cf
+https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py310hde88566_2.tar.bz2#7433944046deda7775c5b1f7e0b6fe18
+https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.1-py310h34c0648_0.conda#763b301155631438b09e6f2072d3ffaa
+https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.2.0-pyhd8ed1ab_0.conda#156fb994a4e07091c4fad2c148589eb2
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.0-h25f0c4b_0.conda#d764367398de61c0d5531dd912e6cc96
+https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38
+https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.0-py310he60537e_0.conda#83a21bbd1c6fbeb339ba914fb5e5c02d
+https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.3-py310h9b08913_0.conda#467244b0dbb7da40927ac6ee0e9491de
+https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.0.0-pyhd8ed1ab_0.conda#c34694044915d7f291ef257029f2e2af
+https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py310h15e2413_1.conda#5be35366687def87437d210fd673100c
+https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554
+https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.0-pyhd8ed1ab_0.conda#70ab87b96126f35d1e68de2ad9fb6423
+https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749
-https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_2.tar.bz2#46784478afa27e33b9d5f017c4deb49d
-https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py310hde88566_0.tar.bz2#49790458218da5f86068f32e3938d334
-https://conda.anaconda.org/conda-forge/noarch/identify-2.5.5-pyhd8ed1ab_0.tar.bz2#985ef0c4ed7a26731c419818080ef6ce
-https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.0-pyhd8ed1ab_0.tar.bz2#aee564f0021a2a0ab12239fbdd28e209
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.6.0-py310h8d5ebf3_0.tar.bz2#001fdef689e7cbcbbce6d5a6ebee90b6
-https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_0.tar.bz2#247c70ce54beeb3e60def44061576821
-https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py310h55e1e36_102.tar.bz2#588d5bd8f16287b766c509ef173b892d
-https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.10-hc4f8a73_0.tar.bz2#fead2b3178129155c334c751df4daba6
-https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_1.tar.bz2#2e7e3630919d29c8216bfa2cd643d79e
-https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310hd8f1fbe_0.tar.bz2#9e3db99607d6f9285b7348c2af28a095
-https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.4.0-pyhd8ed1ab_0.tar.bz2#95286e05a617de9ebfe3246cecbfb72f
-https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.6-hc525480_0.tar.bz2#abd0f27f5e84cd0d5ae14d22b08795d7
-https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.0-py310hcda3f9e_0.tar.bz2#3e81d6afa50895d6dee115ac5d34c2ea
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h5a1934d_102.tar.bz2#bb8bdfa5e3e9e3f6ec861f05cd2ad441
+https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb
+https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_0.conda#81c20b15d2281a1ea48eac5b4eee8cfa
+https://conda.anaconda.org/conda-forge/noarch/identify-2.5.18-pyhd8ed1ab_0.conda#e07a5691c27e65d8d3d9278c578c7771
+https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
+https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_h1e13492_2.conda#d4ed7704f0fa589e4d7656780fa87557
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py310h0a86a1f_103.conda#7f69695b684f2595d9ba1ce26d693b7d
+https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046
+https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934
+https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878
+https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.19.0-pyhd8ed1ab_0.conda#afaa9bf6992f67a82d75fad47a93ec84
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_104.conda#ed3526a8b7f37a7ee04ab0de2a0ac314
https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a
-https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
-https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.20.0-py310hff52083_0.tar.bz2#5af49a9342d50006017b897698921f43
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310h29803b5_0.tar.bz2#b5fb5328cae86d0b1591fc4894e68238
-https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e
-https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2#0738978569b10669bdef41c671252dd1
-https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py310hd9c82d4_101.tar.bz2#0333d51ee594be40f50b157ac6f27b5a
-https://conda.anaconda.org/conda-forge/linux-64/graphviz-6.0.1-h5abf519_0.tar.bz2#123c55da3e9ea8664f73c70e13ef08c2
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.6.0-py310hff52083_0.tar.bz2#2db9d22cc226ef79d9cd87fc958c2b04
-https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2#089382ee0e2dc2eae33a04cc3c2bddb0
+https://conda.anaconda.org/conda-forge/linux-64/pre-commit-3.0.4-py310hff52083_0.conda#099815f9de141008e85f4ede8c55991c
+https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69
+https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda#01f33ad2e0aaf6b5ba4add50dad5ad29
+https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py310h515c5ea_102.conda#bf8276009073388b7159736877eccd79
+https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48
+https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310hab646b1_3.conda#d049da3204bf5ecb54a852b622f2d7d2
+https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda#11d178fc55199482ee48d6812ea83983
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.0-py310hff52083_0.conda#215e2a4504900bef6d68f520c12ef800
+https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429
https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345
-https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.8.1-pyhd8ed1ab_0.tar.bz2#7d8390ec71225ea9841b276552fdffba
+https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2
+https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py310h8deb116_2.conda#a12933d43fc0e55c2e5e00f56196108c
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8
https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a
+https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310hcb7e713_0.conda#bd14eaad9bbf54b78e48ecb8b644fcf6
+https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a
diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock
index 096162793f..e87b21a994 100644
--- a/requirements/ci/nox.lock/py38-linux-64.lock
+++ b/requirements/ci/nox.lock/py38-linux-64.lock
@@ -1,59 +1,64 @@
# Generated by conda-lock.
# platform: linux-64
-# input_hash: 34099f3b69d60b791c26fcde2961739ff7cb0f9c144a37335b9f2183abe0dda3
+# input_hash: 0543fd9bbb31e9f896ccf547f3b155d68bb748634268c28dde6ff3ac77aa74d3
@EXPLICIT
https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81
-https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.9.14-ha878542_0.tar.bz2#87c986dab320658abaf3e701406b665c
+https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45
https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5
-https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.1.0-hdcd56e2_16.tar.bz2#b02605b875559ff99f04351fd5040760
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2#6f5ba041a41eb102a1027d9e68731be7
+https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60
https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf
+https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-3_cp38.conda#2f3f7af062b42d664117662612022204
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.1.0-h69a702a_16.tar.bz2#6bf15e29a20f614b18ae89368260d0a2
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.1.0-h8d9b700_16.tar.bz2#f013cf7749536ce43d82afbffdf499ab
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373
https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab
-https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.1.0-h8d9b700_16.tar.bz2#4f05bc9844f7c101e6e147dab3c88d5c
-https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.7.2-h166bdaf_0.tar.bz2#4a826cd983be6c8fff07a64b6d2079e7
+https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535
+https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f
https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00
https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54
https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a
-https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.9-h27087fc_0.tar.bz2#493ac8b2503a949aebe33d99ea0c284f
-https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_105.tar.bz2#9d3e01547ba04a57372beee01158096f
+https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad
+https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567
https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8
-https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.0-h27087fc_0.tar.bz2#a583d0bc9a85c48e8b07a588d1ac8a80
-https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h27087fc_1009.tar.bz2#17f91dc8bb7a259b02be5bfb2cd2395f
+https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55
+https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37
https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d
https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f
https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed
-https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268
+https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3
+https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51
https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_7.tar.bz2#f82dc1c78bcf73583f2656433ce2933c
+https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4
https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd
-https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.14-h166bdaf_0.tar.bz2#fc84a0446e4e4fb882e78d786cfb9734
+https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.17-h0b41bf4_0.conda#5cc781fd91968b11a8a7fdbee0982676
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3
https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3
-https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211
+https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d
https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206
https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680
https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35
https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f
-https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee
-https://conda.anaconda.org/conda-forge/linux-64/libudev1-249-h166bdaf_4.tar.bz2#dc075ff6fcb46b3d3c7652e543d5f334
+https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52
+https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5
https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37
-https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.12-h166bdaf_3.tar.bz2#29b2d63b0e21b765da0418bc452538c9
-https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.2-h846660c_100.tar.bz2#36a36fe04b932d4b327e7e81c5c43696
+https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41
+https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0
+https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.2-hcb278e6_0.conda#08efb1e1813f1a151b7a945b972a049b
+https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238
-https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e
-https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1q-h166bdaf_0.tar.bz2#07acc367c7fc8b716770cd5b36d31717
-https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa
+https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.8-h0b41bf4_0.conda#e043403cd18faf815bf7705ab6c1e092
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036
https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a
@@ -63,196 +68,203 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.t
https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534
https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98
https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15
-https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605
+https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0
https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae
-https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_4.tar.bz2#dd3e1941dd06f64cb88647d2f7ff8aaa
+https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.22-h11f4161_0.conda#504fa9e712b99494a9cf4630e3ca7d78
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_7.tar.bz2#37a460703214d0d1b421e2a47eb5e6d0
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_7.tar.bz2#785a9296ea478eb78c47593c4da6550f
-https://conda.anaconda.org/conda-forge/linux-64/libcap-2.65-ha37c62d_0.tar.bz2#2c1c43f5442731b58e070bcee45a86ec
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25
+https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1
-https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336
-https://conda.anaconda.org/conda-forge/linux-64/libflac-1.3.4-h27087fc_0.tar.bz2#620e52e160fd09eb8772dedd46bb19ef
-https://conda.anaconda.org/conda-forge/linux-64/libglib-2.72.1-h2d90d5f_0.tar.bz2#ebeadbb5fbc44052eeb6f96a2136e3c2
-https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-he0ac6c6_0.tar.bz2#f5759f0c80708fbf9c4836c0cb46d0fe
-https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.47.0-hdcd2b5c_1.tar.bz2#6fe9e31c2b8d0b022626ccac13e6ca3c
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.38-h753d276_0.tar.bz2#575078de1d3a3114b3ce131bd1508d0c
-https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.39.3-h753d276_0.tar.bz2#ccb2457c73609f2622b8a4b3e42e5d8b
-https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-haa6b8db_3.tar.bz2#89acee135f0809a18a1f4537390aa2dd
+https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd
+https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0
+https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb
+https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e
+https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416
+https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f
+https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906
https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904
-https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.14-h22db469_4.tar.bz2#aced7c1f4b4dbfea08e033c6ae97c53e
-https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc869a4a_1.tar.bz2#7a268cf1386d271e576e35ae82149ef2
-https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.30-haf5c9bc_1.tar.bz2#62b588b2a313ac3d9c2ead767baa3b5d
-https://conda.anaconda.org/conda-forge/linux-64/portaudio-19.6.0-h8e90077_6.tar.bz2#2935b98de57e1f261ef8253655a8eb80
+https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8
+https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf
+https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-ha901b37_0.conda#6a39818710235826181e104aada40c75
+https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b
https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa
https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867
https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3
-https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.12-h166bdaf_3.tar.bz2#76c717057865201aa2d24b79315645bb
-https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74
-https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685
-https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_7.tar.bz2#1699c1211d56a23c66047524cd76796e
-https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
-https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_0.tar.bz2#4e54cbfc47b8c74c2ecc1e7730d8edce
-https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.72.1-h6239696_0.tar.bz2#a3a99cc33279091262bbc4f5ee7c4571
-https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363
-https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.3-h3790be6_0.tar.bz2#7d862b05445123144bec92cb1acc8ef8
+https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295
+https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4
+https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06
+https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336
+https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719
-https://conda.anaconda.org/conda-forge/linux-64/libclang13-14.0.6-default_h3a83d3e_0.tar.bz2#cdbd49e0ab5c5a6c522acb8271977d4c
+https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9
+https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe
https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad
-https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.0.31-h9c3ff4c_1.tar.bz2#fc4b6d93da04731db7601f2a1b1dc96a
-https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.4.0-h55922b4_4.tar.bz2#901791f0ec7cddc8714e76e273013a91
-https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b
-https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.30-h28c427c_1.tar.bz2#0bd292db365c83624316efc2764d9f16
-https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.39.3-h4ff8645_0.tar.bz2#f03cf4ec974e32b6c5d349f62637e36e
+https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-hadd5161_0.conda#70cbb0c2033665f2a7339bf0ec51a67f
+https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7
+https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h6adf6a1_2.conda#2e648a34072eb39d7c4fc2a9981c5f0c
+https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_0.conda#3f67368c9b0e77a693acad193310baf1
+https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_0.conda#b05d7ea8b76f1172d5fe4f30e03277ea
+https://conda.anaconda.org/conda-forge/linux-64/nss-3.88-he45b914_0.conda#d7a81dfb99ad8fbb88872fb7ec646e6c
+https://conda.anaconda.org/conda-forge/linux-64/python-3.8.16-he550d4f_1_cpython.conda#9de84cccfbc5f8350a3667bb6ef6fc30
+https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790
-https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_7.tar.bz2#3889dec08a472eb0f423e5609c76bde1
-https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.0-hc2a2eb6_1.tar.bz2#139ace7da04f011abbd531cb2a9840ee
-https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.8-hff1cb4f_1.tar.bz2#a61c6312192e7c9de71548a6706a21e6
-https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.18-h8c3723f_1003.tar.bz2#9cb956b6605cfc7d8ee1b15e96bd88ba
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f
-https://conda.anaconda.org/conda-forge/linux-64/libclang-14.0.6-default_h2e3cab8_0.tar.bz2#eb70548da697e50cefa7ba939d57d001
-https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h3e49a29_2.tar.bz2#3b88f1d0fe2580594d58d7e44d664617
-https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.83.1-h7bff187_0.tar.bz2#d0c278476dba3b29ee13203784672ab1
-https://conda.anaconda.org/conda-forge/linux-64/libpq-14.5-hd77ab85_0.tar.bz2#d3126b425a04ed2360da1e651cef1b2d
-https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h522a892_0.tar.bz2#802e43f480122a85ae6a34c1909f8f98
-https://conda.anaconda.org/conda-forge/linux-64/nss-3.78-h2350873_0.tar.bz2#ab3df39f96742e6f1a9878b09274c1dc
-https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h7d73246_1.tar.bz2#a11b4df9271a8d7917686725aa04c8f2
-https://conda.anaconda.org/conda-forge/linux-64/python-3.8.13-h582c2e5_0_cpython.tar.bz2#8ec74710472994e2411a8020fa8589ce
-https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
-https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0
-https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2#6d3ccbc56256204925bfa8378722792f
-https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0
-https://conda.anaconda.org/conda-forge/noarch/certifi-2022.9.14-pyhd8ed1ab_0.tar.bz2#963e8ceccba45b5cf15f33906d5a20a1
+https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e
+https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a
+https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b
+https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b
+https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9
+https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418
+https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e
-https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2#a6cf47b09786423200d7982d1faa19eb
-https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2#c267da48ce208905d7d976d49dfd9433
-https://conda.anaconda.org/conda-forge/linux-64/curl-7.83.1-h7bff187_0.tar.bz2#ba33b9995f5e691e4f439422d6efafc7
+https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf
+https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16
+https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb
-https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.5-pyhd8ed1ab_0.tar.bz2#f15c3912378a07726093cc94d1e13251
+https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
+https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7
+https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py38h578d9bd_3.tar.bz2#a7866449fb9e5e4008a02df276549d34
+https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a
https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2
-https://conda.anaconda.org/conda-forge/noarch/filelock-3.8.0-pyhd8ed1ab_0.tar.bz2#10f0218dbd493ab2e5dc6759ddea4526
-https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.8.2-pyhd8ed1ab_0.tar.bz2#140dc6615896e7d4be1059a63370be93
-https://conda.anaconda.org/conda-forge/linux-64/glib-2.72.1-h6239696_0.tar.bz2#1698b7684d3c6a4d1de2ab946f5b0fb5
-https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h08b82f9_0.tar.bz2#de601caacbaa828d845f758e07e3b85e
+https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506
+https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda#44f6828b8f7cc3433d68d1d1c0e9add2
+https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815
+https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed
https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352
-https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905
+https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9
-https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h18fbbfe_3.tar.bz2#ea9758cf553476ddf75c789fdd239dc5
+https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py38h43d8883_1.tar.bz2#41ca56d5cac7bfc7eb4fcdbee878eb84
+https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97
+https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77
+https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f
+https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.88.0-hdc1c0ab_0.conda#c44acb3847ff118c068b662aff858afd
+https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654b17eccaba55b8581d6b9c77f53cc
+https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5
+https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4
+https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py38h1de0b5d_0.conda#6d97b5d6f06933ab653f1862ddf6e33e
+https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py38h97ac3a3_0.tar.bz2#0c469687a517052c0d581fc6e1a4189d
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19
-https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.5.2-pyhd8ed1ab_1.tar.bz2#2fb3f88922e7aec26ba652fcdfe13950
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py38h10c12cc_0.conda#05592c85b9f6931dc2df1e80c0d56294
+https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea
+https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda#1ff2e3ca41f0ce16afec7190db28288b
+https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9
https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727
-https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.0-h93bde94_0.tar.bz2#255c7204dda39747c3ba380d28b026d7
-https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-14.0-h0868958_9.tar.bz2#5bca71f0cf9b86ec58dd9d6216a3ffaf
-https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054
+https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py38h0a891b7_0.tar.bz2#fe2ef279417faa1af0adf178de2032f7
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025
-https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860
-https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2#974bca71d00364630f63f31fa7e059cb
-https://conda.anaconda.org/conda-forge/noarch/setuptools-65.3.0-pyhd8ed1ab_1.tar.bz2#a64c8af7be7a6348c1d9e530f88fa4da
+https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py38h1de0b5d_0.conda#7db73572d4f7e10a759bad609a228ad0
+https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda#f59d49a7b464901cf714b9e7984d01a2
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_5.tar.bz2#0856c59f9ddb710c640dc0428d66b1b7
+https://conda.anaconda.org/conda-forge/noarch/setuptools-67.3.2-pyhd8ed1ab_0.conda#543af74c4042aee5702a033e03a216d0
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e
https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36
-https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2#a9d85960bc62d53cc4ea0d1d27f73c98
-https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022
-https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.1-pyhd8ed1ab_0.tar.bz2#a3508a0c850745b875de88aea4c40cc5
-https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a
-https://conda.anaconda.org/conda-forge/noarch/babel-2.10.3-pyhd8ed1ab_0.tar.bz2#72f1c6d03109d7a70087bc1d029a8eda
-https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d
-https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_0.tar.bz2#a970d201055ec06a75db83bf25447eb2
-https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py38h578d9bd_2.tar.bz2#affd6b87adb2b0c98da0e3ad274349be
-https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.20.3-hd4edc92_2.tar.bz2#153cfb02fb8be7dd7cabcbcb58a63053
-https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-5.2.0-hf9f4e7c_0.tar.bz2#3c5f4fbd64c7254fbe246ca9d87863b6
-https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.11.4-py38h578d9bd_0.tar.bz2#037225c33a50e99c5d4f86fac90f6de8
-https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py38h43d8883_0.tar.bz2#ae54c61918e1cbd280b8587ed6219258
-https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h06c54e2_4.tar.bz2#491803a7356c6a668a84d71f491c4014
-https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py38h0a891b7_1.tar.bz2#20d003ad5f584e212c299f64cac46c05
-https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38h97ac3a3_2.tar.bz2#fccce86e5fc8183bf2658ac9bfc535b4
+https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py38h0a891b7_1.tar.bz2#358beb228a53b5e1031862de3525d1d3
+https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a
+https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py38h0a891b7_0.tar.bz2#44421904760e9f5ae2035193e04360f0
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940
+https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
+https://conda.anaconda.org/conda-forge/noarch/zipp-3.13.0-pyhd8ed1ab_0.conda#41b09d997939e83b231c4557a90c3b13
+https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba
+https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.2-pyha770c72_0.conda#88b59f6989f0ed5ab3433af0b82555e1
+https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0
+https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_3.conda#3ac112151c6b6cfe457e976de41af0c5
+https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py38h26c90d9_1.tar.bz2#dcc025a7bb54374979c500c2e161fac9
+https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py38hfbd4bf9_0.conda#638537863b298151635c05c762a997ab
+https://conda.anaconda.org/conda-forge/linux-64/curl-7.88.0-hdc1c0ab_0.conda#5d9ac94ee84305ada32c3d287d0ec602
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py38h0a891b7_1.tar.bz2#62c89ddefed9c5835e228a32b357a28d
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4
+https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0
+https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62
+https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.10.2-pyhd8ed1ab_0.conda#de76905f801c22fc43e624058574eab3
+https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
+https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d
+https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572
+https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h26c90d9_1008.tar.bz2#6bc8cd29312f4fc77156b78124e165cd
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c
-https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.3-py38h3a7f9d9_0.tar.bz2#83ba913fc1174925d4e862eccb53db59
-https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85
https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54
-https://conda.anaconda.org/conda-forge/linux-64/pillow-9.2.0-py38ha3b2c9c_2.tar.bz2#a077cc2bb9d854074b1cf4607252da7a
-https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2#0b43abe4d3ee93e82742d37def53a836
-https://conda.anaconda.org/conda-forge/linux-64/pluggy-1.0.0-py38h578d9bd_3.tar.bz2#6ce4ce3d4490a56eb33b52c179609193
+https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py38hde6dc18_1.conda#3de5619d3f556f966189e5251a266125
+https://conda.anaconda.org/conda-forge/noarch/pip-23.0.1-pyhd8ed1ab_0.conda#8025ca83b8ba5430b640b83917c2a6f7
https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364
-https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.2-py38h0a891b7_0.tar.bz2#907a39b6d7443f770ed755885694f864
-https://conda.anaconda.org/conda-forge/noarch/pygments-2.13.0-pyhd8ed1ab_0.tar.bz2#9f478e8eedd301008b5f395bad0caaed
-https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.0-py38hd7890fc_1.tar.bz2#f851bb08c85122fd0e1f66d2072ebf0b
+https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.1-h8ffa02c_2.conda#c264aea0e16bba26afa0a0940e954492
+https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-ha8d29e2_1.conda#dbfc2a8d63a43a11acf4c704e1ef9d0c
+https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.1-pyhd8ed1ab_0.conda#f0be05afc9c9ab45e273c088e00c258b
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984
-https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.0.0-py38h0a891b7_1.tar.bz2#69fc64e4f4c13abe0b8df699ddaa1051
-https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_4.tar.bz2#ba24ff01bb38c5cd5be54b45ef685db3
-https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py38h0a891b7_0.tar.bz2#acd276486a0067bee3098590f0952a0f
-https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.3.0-hd8ed1ab_0.tar.bz2#f3e98e944832fb271a0dbda7b7771dc6
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h0a891b7_1.tar.bz2#83df0e9e3faffc295f12607438691465
-https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.16.5-py38h578d9bd_0.tar.bz2#b2247bb2492e261c25fabbbb2c7a23b5
-https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h0a891b7_1004.tar.bz2#9fcaaca218dcfeb8da806d4fd4824aa0
-https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py38h26c90d9_0.tar.bz2#df081ec90a13f53fe522c8e876d3f0cf
-https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.5-py38h43d8883_0.tar.bz2#0650a251fd701bbe5ac44e74cf632af8
-https://conda.anaconda.org/conda-forge/linux-64/cryptography-37.0.4-py38h2b5fc30_0.tar.bz2#28e9acd6f13ed29f27d5550a1cf0554b
-https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.9.1-pyhd8ed1ab_0.tar.bz2#68bb7f24f75b9691c42fd50e178749f5
-https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.37.3-py38h0a891b7_0.tar.bz2#ff4c112a78161241ca8a7af74de6a50b
-https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.20.3-h57caac4_2.tar.bz2#58838c4ca7d1a5948f5cdcbb8170d753
-https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
-https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h71d37f0_1007.tar.bz2#c8d3d8f137f8af7b1daca318131223b1
-https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_0.tar.bz2#247c70ce54beeb3e60def44061576821
-https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.0-py38h8f669ce_0.tar.bz2#f91da48c62c91659da28bd95559c75ff
-https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.10-hc4f8a73_0.tar.bz2#fead2b3178129155c334c751df4daba6
-https://conda.anaconda.org/conda-forge/linux-64/pytest-7.1.3-py38h578d9bd_0.tar.bz2#1fdabff56623511910fef3b418ff07a2
-https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h71d37f0_2.tar.bz2#cdef2f7b0e263e338016da4b77ae4c0b
-https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py38h71d37f0_1.tar.bz2#704f1776af689de568514b0ff9dd0fbe
-https://conda.anaconda.org/conda-forge/linux-64/scipy-1.9.1-py38hea3f02b_0.tar.bz2#b232edb409c6a79e5921b3591c56b716
-https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.0.5-pyhd8ed1ab_0.tar.bz2#743074b7a216807886f7e8f6d497cceb
-https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.4-py38h3b45516_0.tar.bz2#d8621497bcc7b369ef9cce25d5a58aeb
-https://conda.anaconda.org/conda-forge/linux-64/sip-6.6.2-py38hfa26641_0.tar.bz2#b869c6b54a02c92fac8b10c0d9b32e43
+https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h26c90d9_3.tar.bz2#6e7902b0e96f42fa1b73daa5f65dd669
+https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py38h7e4f40d_0.conda#17f682c947f9cabd348e7276f00c6d85
+https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py38hd07e089_0.conda#84c9262ab4057ed9f80888fcfc4bf60a
+https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.7-py38h8dc9893_0.conda#ea242937718f3dacf253355e1d634535
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026
+https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h0a891b7_1005.tar.bz2#e99e08812dfff30fdd17b3f8838e2759
+https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py38h26c90d9_2.tar.bz2#0ea017e84efe45badce6c32f274dbf8e
+https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.1-py38h3d167d9_0.conda#375c00c98c36b0e79aaaf2149e51f27d
+https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.2.0-pyhd8ed1ab_0.conda#156fb994a4e07091c4fad2c148589eb2
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.0-h25f0c4b_0.conda#d764367398de61c0d5531dd912e6cc96
+https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38
+https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.10.2-pyhd8ed1ab_0.conda#ebf8b116aac3fe86270bfe5f61fe2b80
+https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d
+https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.3-py38hdc8b05c_0.conda#5073966d63a54434d2a2fc41d325b072
+https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.0.0-pyhd8ed1ab_0.conda#c34694044915d7f291ef257029f2e2af
+https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py38h58d5fe2_1.conda#5286eaec7e93586e4ae05e7d658cd3e2
+https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py38h8dc9893_3.conda#7bb0328b4a0f857aeb432426b9a5f908
+https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.0-pyhd8ed1ab_0.conda#70ab87b96126f35d1e68de2ad9fb6423
+https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749
-https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h43d8883_2.tar.bz2#3f6ce81c7d28563fe2af763d9ff43e62
-https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py38h71d37f0_0.tar.bz2#b9e7f6f7509496a4a62906d02dfe3128
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h5a1934d_102.tar.bz2#bb8bdfa5e3e9e3f6ec861f05cd2ad441
+https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h43d8883_3.tar.bz2#82b3797d08a43a101b645becbb938e65
+https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_0.conda#81c20b15d2281a1ea48eac5b4eee8cfa
+https://conda.anaconda.org/conda-forge/noarch/identify-2.5.18-pyhd8ed1ab_0.conda#e07a5691c27e65d8d3d9278c578c7771
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.0-py38hd6c3c57_0.conda#dd63f6486ba95c036b6bfe0b5c53d875
+https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_h1e13492_2.conda#d4ed7704f0fa589e4d7656780fa87557
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py38h6b4b75c_103.conda#ea3d2204fc3a7db7d831daa437a58717
+https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046
+https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934
+https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878
+https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.19.0-pyhd8ed1ab_0.conda#afaa9bf6992f67a82d75fad47a93ec84
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_104.conda#ed3526a8b7f37a7ee04ab0de2a0ac314
https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422
-https://conda.anaconda.org/conda-forge/noarch/identify-2.5.5-pyhd8ed1ab_0.tar.bz2#985ef0c4ed7a26731c419818080ef6ce
-https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.0-pyhd8ed1ab_0.tar.bz2#aee564f0021a2a0ab12239fbdd28e209
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.6.0-py38hb021067_0.tar.bz2#315ee5c0fbee508e739ddfac2bf8f600
-https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py38h2a9f00d_102.tar.bz2#533ae5db3e2367d71a7890efb0aa3cdc
-https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_1.tar.bz2#2e7e3630919d29c8216bfa2cd643d79e
-https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py38hfa26641_0.tar.bz2#6ddbd9abb62e70243702c006b81c63e4
-https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.4.0-pyhd8ed1ab_0.tar.bz2#95286e05a617de9ebfe3246cecbfb72f
-https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.6-hc525480_0.tar.bz2#abd0f27f5e84cd0d5ae14d22b08795d7
-https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.0-py38h606536b_0.tar.bz2#38fc3704565e44fb9fcdfaded03eee76
-https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py38h9147699_101.tar.bz2#5a9de1dec507b6614150a77d1aabf257
-https://conda.anaconda.org/conda-forge/linux-64/graphviz-6.0.1-h5abf519_0.tar.bz2#123c55da3e9ea8664f73c70e13ef08c2
https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
-https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.20.0-py38h578d9bd_0.tar.bz2#ac8aa845f1177901eecf1518997ea0a1
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38h7492b6b_0.tar.bz2#59ece9f652baf50ee6b842db833896ae
-https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e
-https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2#0738978569b10669bdef41c671252dd1
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.6.0-py38h578d9bd_0.tar.bz2#602eb908e81892115c1405c9d99abd56
-https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2#089382ee0e2dc2eae33a04cc3c2bddb0
+https://conda.anaconda.org/conda-forge/linux-64/pre-commit-3.0.4-py38h578d9bd_0.conda#ae802cf221c9549ce9924e1a3718342d
+https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69
+https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda#01f33ad2e0aaf6b5ba4add50dad5ad29
+https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py38h4407c66_102.conda#9a5c841acef11d7e4f0bf98cbc6308b3
+https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48
+https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38ha0d8c90_3.conda#e965dc172d67920d058ac2b3a0e27565
+https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda#11d178fc55199482ee48d6812ea83983
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.0-py38h578d9bd_0.conda#7fb6ab52eb5de5023445561d86dbd602
+https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429
https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345
-https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.8.1-pyhd8ed1ab_0.tar.bz2#7d8390ec71225ea9841b276552fdffba
+https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2
+https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py38h10c12cc_2.conda#d6a3defdc4ab4acd69c04c8ef73d9b57
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8
https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a
+https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py38h3d2c718_0.conda#55ba6e3a49c4293302262286a49607d8
+https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a
diff --git a/requirements/ci/nox.lock/py39-linux-64.lock b/requirements/ci/nox.lock/py39-linux-64.lock
index 9d454a2569..f2eb79bc0a 100644
--- a/requirements/ci/nox.lock/py39-linux-64.lock
+++ b/requirements/ci/nox.lock/py39-linux-64.lock
@@ -1,60 +1,65 @@
# Generated by conda-lock.
# platform: linux-64
-# input_hash: 44cf413042165b62fe105f738e80b926629f61c1763d74df419910081521225b
+# input_hash: de178c2d53980747bafc10c4a4387eeb8c700311af7b35a2fcb49f1b441b960b
@EXPLICIT
https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81
-https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.9.14-ha878542_0.tar.bz2#87c986dab320658abaf3e701406b665c
+https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45
https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5
-https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.1.0-hdcd56e2_16.tar.bz2#b02605b875559ff99f04351fd5040760
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2#6f5ba041a41eb102a1027d9e68731be7
+https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60
https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf
-https://conda.anaconda.org/conda-forge/noarch/tzdata-2022c-h191b570_0.tar.bz2#a56386ad31a7322940dd7d03fb3a9979
+https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e
+https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda#51fc4fcfb19f5d95ffc8c339db5068e8
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.1.0-h69a702a_16.tar.bz2#6bf15e29a20f614b18ae89368260d0a2
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.1.0-h8d9b700_16.tar.bz2#f013cf7749536ce43d82afbffdf499ab
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373
https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab
-https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.1.0-h8d9b700_16.tar.bz2#4f05bc9844f7c101e6e147dab3c88d5c
-https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.7.2-h166bdaf_0.tar.bz2#4a826cd983be6c8fff07a64b6d2079e7
+https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535
+https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f
https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00
https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54
https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a
-https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.9-h27087fc_0.tar.bz2#493ac8b2503a949aebe33d99ea0c284f
-https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_105.tar.bz2#9d3e01547ba04a57372beee01158096f
+https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad
+https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567
https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8
-https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.0-h27087fc_0.tar.bz2#a583d0bc9a85c48e8b07a588d1ac8a80
-https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h27087fc_1009.tar.bz2#17f91dc8bb7a259b02be5bfb2cd2395f
+https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55
+https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37
https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d
https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f
https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed
-https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h166bdaf_2.tar.bz2#ee8b844357a0946870901c7c6f418268
+https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3
+https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51
https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_7.tar.bz2#f82dc1c78bcf73583f2656433ce2933c
+https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4
https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd
-https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.14-h166bdaf_0.tar.bz2#fc84a0446e4e4fb882e78d786cfb9734
+https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.17-h0b41bf4_0.conda#5cc781fd91968b11a8a7fdbee0982676
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3
https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3
-https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211
+https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d
https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206
https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680
https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35
https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f
-https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee
-https://conda.anaconda.org/conda-forge/linux-64/libudev1-249-h166bdaf_4.tar.bz2#dc075ff6fcb46b3d3c7652e543d5f334
+https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52
+https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5
https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37
-https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.12-h166bdaf_3.tar.bz2#29b2d63b0e21b765da0418bc452538c9
-https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.2-h846660c_100.tar.bz2#36a36fe04b932d4b327e7e81c5c43696
+https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41
+https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0
+https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.2-hcb278e6_0.conda#08efb1e1813f1a151b7a945b972a049b
+https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238
-https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e
-https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1q-h166bdaf_0.tar.bz2#07acc367c7fc8b716770cd5b36d31717
-https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa
+https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.8-h0b41bf4_0.conda#e043403cd18faf815bf7705ab6c1e092
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036
https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a
@@ -64,196 +69,203 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.t
https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534
https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98
https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15
-https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605
+https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0
https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae
-https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_4.tar.bz2#dd3e1941dd06f64cb88647d2f7ff8aaa
+https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.22-h11f4161_0.conda#504fa9e712b99494a9cf4630e3ca7d78
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_7.tar.bz2#37a460703214d0d1b421e2a47eb5e6d0
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_7.tar.bz2#785a9296ea478eb78c47593c4da6550f
-https://conda.anaconda.org/conda-forge/linux-64/libcap-2.65-ha37c62d_0.tar.bz2#2c1c43f5442731b58e070bcee45a86ec
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25
+https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1
-https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336
-https://conda.anaconda.org/conda-forge/linux-64/libflac-1.3.4-h27087fc_0.tar.bz2#620e52e160fd09eb8772dedd46bb19ef
-https://conda.anaconda.org/conda-forge/linux-64/libglib-2.72.1-h2d90d5f_0.tar.bz2#ebeadbb5fbc44052eeb6f96a2136e3c2
-https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-he0ac6c6_0.tar.bz2#f5759f0c80708fbf9c4836c0cb46d0fe
-https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.47.0-hdcd2b5c_1.tar.bz2#6fe9e31c2b8d0b022626ccac13e6ca3c
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.38-h753d276_0.tar.bz2#575078de1d3a3114b3ce131bd1508d0c
-https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.39.3-h753d276_0.tar.bz2#ccb2457c73609f2622b8a4b3e42e5d8b
-https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-haa6b8db_3.tar.bz2#89acee135f0809a18a1f4537390aa2dd
+https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd
+https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0
+https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb
+https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e
+https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416
+https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f
+https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906
https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904
-https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.14-h22db469_4.tar.bz2#aced7c1f4b4dbfea08e033c6ae97c53e
-https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc869a4a_1.tar.bz2#7a268cf1386d271e576e35ae82149ef2
-https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.30-haf5c9bc_1.tar.bz2#62b588b2a313ac3d9c2ead767baa3b5d
-https://conda.anaconda.org/conda-forge/linux-64/portaudio-19.6.0-h8e90077_6.tar.bz2#2935b98de57e1f261ef8253655a8eb80
+https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8
+https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf
+https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-ha901b37_0.conda#6a39818710235826181e104aada40c75
+https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b
https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa
https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867
https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3
-https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.12-h166bdaf_3.tar.bz2#76c717057865201aa2d24b79315645bb
-https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h6239696_4.tar.bz2#adcf0be7897e73e312bd24353b613f74
-https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685
-https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_7.tar.bz2#1699c1211d56a23c66047524cd76796e
-https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
-https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_0.tar.bz2#4e54cbfc47b8c74c2ecc1e7730d8edce
-https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.72.1-h6239696_0.tar.bz2#a3a99cc33279091262bbc4f5ee7c4571
-https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363
-https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.3-h3790be6_0.tar.bz2#7d862b05445123144bec92cb1acc8ef8
+https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295
+https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4
+https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06
+https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336
+https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719
-https://conda.anaconda.org/conda-forge/linux-64/libclang13-14.0.6-default_h3a83d3e_0.tar.bz2#cdbd49e0ab5c5a6c522acb8271977d4c
+https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9
+https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe
https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad
-https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.0.31-h9c3ff4c_1.tar.bz2#fc4b6d93da04731db7601f2a1b1dc96a
-https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.4.0-h55922b4_4.tar.bz2#901791f0ec7cddc8714e76e273013a91
-https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b
-https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.30-h28c427c_1.tar.bz2#0bd292db365c83624316efc2764d9f16
-https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.39.3-h4ff8645_0.tar.bz2#f03cf4ec974e32b6c5d349f62637e36e
+https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-hadd5161_0.conda#70cbb0c2033665f2a7339bf0ec51a67f
+https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7
+https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h6adf6a1_2.conda#2e648a34072eb39d7c4fc2a9981c5f0c
+https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_0.conda#3f67368c9b0e77a693acad193310baf1
+https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_0.conda#b05d7ea8b76f1172d5fe4f30e03277ea
+https://conda.anaconda.org/conda-forge/linux-64/nss-3.88-he45b914_0.conda#d7a81dfb99ad8fbb88872fb7ec646e6c
+https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78
+https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790
-https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_7.tar.bz2#3889dec08a472eb0f423e5609c76bde1
-https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.0-hc2a2eb6_1.tar.bz2#139ace7da04f011abbd531cb2a9840ee
-https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.8-hff1cb4f_1.tar.bz2#a61c6312192e7c9de71548a6706a21e6
-https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.18-h8c3723f_1003.tar.bz2#9cb956b6605cfc7d8ee1b15e96bd88ba
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f
-https://conda.anaconda.org/conda-forge/linux-64/libclang-14.0.6-default_h2e3cab8_0.tar.bz2#eb70548da697e50cefa7ba939d57d001
-https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h3e49a29_2.tar.bz2#3b88f1d0fe2580594d58d7e44d664617
-https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.83.1-h7bff187_0.tar.bz2#d0c278476dba3b29ee13203784672ab1
-https://conda.anaconda.org/conda-forge/linux-64/libpq-14.5-hd77ab85_0.tar.bz2#d3126b425a04ed2360da1e651cef1b2d
-https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h522a892_0.tar.bz2#802e43f480122a85ae6a34c1909f8f98
-https://conda.anaconda.org/conda-forge/linux-64/nss-3.78-h2350873_0.tar.bz2#ab3df39f96742e6f1a9878b09274c1dc
-https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h7d73246_1.tar.bz2#a11b4df9271a8d7917686725aa04c8f2
-https://conda.anaconda.org/conda-forge/linux-64/python-3.9.13-h9a8a25e_0_cpython.tar.bz2#69bc307cc4d7396c5fccb26bbcc9c379
-https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
-https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0
-https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2#6d3ccbc56256204925bfa8378722792f
-https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0
-https://conda.anaconda.org/conda-forge/noarch/certifi-2022.9.14-pyhd8ed1ab_0.tar.bz2#963e8ceccba45b5cf15f33906d5a20a1
+https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e
+https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb
+https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b
+https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b
+https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9
+https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418
+https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e
-https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2#a6cf47b09786423200d7982d1faa19eb
-https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2#c267da48ce208905d7d976d49dfd9433
-https://conda.anaconda.org/conda-forge/linux-64/curl-7.83.1-h7bff187_0.tar.bz2#ba33b9995f5e691e4f439422d6efafc7
+https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf
+https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16
+https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb
-https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.5-pyhd8ed1ab_0.tar.bz2#f15c3912378a07726093cc94d1e13251
+https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
+https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7
+https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py39hf3d152e_3.tar.bz2#4f0fa7459a1f40a969aaad418b1c428c
+https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a
https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2
-https://conda.anaconda.org/conda-forge/noarch/filelock-3.8.0-pyhd8ed1ab_0.tar.bz2#10f0218dbd493ab2e5dc6759ddea4526
-https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.8.2-pyhd8ed1ab_0.tar.bz2#140dc6615896e7d4be1059a63370be93
-https://conda.anaconda.org/conda-forge/linux-64/glib-2.72.1-h6239696_0.tar.bz2#1698b7684d3c6a4d1de2ab946f5b0fb5
-https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h08b82f9_0.tar.bz2#de601caacbaa828d845f758e07e3b85e
+https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506
+https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda#44f6828b8f7cc3433d68d1d1c0e9add2
+https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815
+https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed
https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352
-https://conda.anaconda.org/conda-forge/noarch/iniconfig-1.1.1-pyh9f0ad1d_0.tar.bz2#39161f81cc5e5ca45b8226fbb06c6905
+https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9
-https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h18fbbfe_3.tar.bz2#ea9758cf553476ddf75c789fdd239dc5
+https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411
+https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97
+https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77
+https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f
+https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.88.0-hdc1c0ab_0.conda#c44acb3847ff118c068b662aff858afd
+https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654b17eccaba55b8581d6b9c77f53cc
+https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5
+https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4
+https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py39h72bdee0_0.conda#35514f5320206df9f4661c138c02e1c1
+https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py39h32b9844_0.tar.bz2#b035b507f55bb6a967d86d4b7e059437
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19
-https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.5.2-pyhd8ed1ab_1.tar.bz2#2fb3f88922e7aec26ba652fcdfe13950
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py39h7360e5f_0.conda#757070dc7cc33003254888808cd34f1e
+https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea
+https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda#1ff2e3ca41f0ce16afec7190db28288b
+https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9
https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727
-https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.0-h93bde94_0.tar.bz2#255c7204dda39747c3ba380d28b026d7
-https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-14.0-h0868958_9.tar.bz2#5bca71f0cf9b86ec58dd9d6216a3ffaf
-https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054
+https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py39hb9d737c_0.tar.bz2#12184951da572828fb986b06ffb63eed
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025
-https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-2_cp39.tar.bz2#39adde4247484de2bb4000122fdcf665
-https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2#974bca71d00364630f63f31fa7e059cb
-https://conda.anaconda.org/conda-forge/noarch/setuptools-65.3.0-pyhd8ed1ab_1.tar.bz2#a64c8af7be7a6348c1d9e530f88fa4da
+https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557
+https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda#f59d49a7b464901cf714b9e7984d01a2
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248
+https://conda.anaconda.org/conda-forge/noarch/setuptools-67.3.2-pyhd8ed1ab_0.conda#543af74c4042aee5702a033e03a216d0
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e
https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36
-https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2#a9d85960bc62d53cc4ea0d1d27f73c98
-https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022
-https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.1-pyhd8ed1ab_0.tar.bz2#a3508a0c850745b875de88aea4c40cc5
-https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb
-https://conda.anaconda.org/conda-forge/noarch/babel-2.10.3-pyhd8ed1ab_0.tar.bz2#72f1c6d03109d7a70087bc1d029a8eda
-https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2#eeec8814bd97b2681f708bb127478d7d
-https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_0.tar.bz2#61e961a94c8fd535e4496b17e7452dfe
-https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py39hf3d152e_2.tar.bz2#fea5dea40592ea943aa56f4935308ee4
-https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.20.3-hd4edc92_2.tar.bz2#153cfb02fb8be7dd7cabcbcb58a63053
-https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-5.2.0-hf9f4e7c_0.tar.bz2#3c5f4fbd64c7254fbe246ca9d87863b6
-https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.11.4-py39hf3d152e_0.tar.bz2#4c2a0eabf0b8980b2c755646a6f750eb
-https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_0.tar.bz2#e8d1310648c189d6d11a2e13f73da1fe
-https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h06c54e2_4.tar.bz2#491803a7356c6a668a84d71f491c4014
-https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.1-py39hb9d737c_1.tar.bz2#7cda413e43b252044a270c2477031c5c
-https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py39h32b9844_2.tar.bz2#b809706525f081610469169b671b2600
+https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py39hb9d737c_1.tar.bz2#8a7d309b08cff6386fe384aa10dd3748
+https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a
+https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940
+https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
+https://conda.anaconda.org/conda-forge/noarch/zipp-3.13.0-pyhd8ed1ab_0.conda#41b09d997939e83b231c4557a90c3b13
+https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba
+https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.2-pyha770c72_0.conda#88b59f6989f0ed5ab3433af0b82555e1
+https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0
+https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe
+https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1
+https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951
+https://conda.anaconda.org/conda-forge/linux-64/curl-7.88.0-hdc1c0ab_0.conda#5d9ac94ee84305ada32c3d287d0ec602
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py39hb9d737c_1.tar.bz2#3f2d104f2fefdd5e8a205dd3aacbf1d7
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4
+https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0
+https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62
+https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.10.2-pyhd8ed1ab_0.conda#de76905f801c22fc43e624058574eab3
+https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
+https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d
+https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572
+https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c
-https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.3-py39hba7629e_0.tar.bz2#320e25179733ec4a2ecffcebc8abbc80
-https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85
https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54
-https://conda.anaconda.org/conda-forge/linux-64/pillow-9.2.0-py39hd5dbb17_2.tar.bz2#3b74a959f6a8008f5901de60b3572c09
-https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2#0b43abe4d3ee93e82742d37def53a836
-https://conda.anaconda.org/conda-forge/linux-64/pluggy-1.0.0-py39hf3d152e_3.tar.bz2#c375c89340e563053f3656c7f134d265
+https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py39h2320bf1_1.conda#d2f79132b9c8e416058a4cd84ef27b3d
+https://conda.anaconda.org/conda-forge/noarch/pip-23.0.1-pyhd8ed1ab_0.conda#8025ca83b8ba5430b640b83917c2a6f7
https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364
-https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.2-py39hb9d737c_0.tar.bz2#1e7ffe59e21862559e06b981817e5058
-https://conda.anaconda.org/conda-forge/noarch/pygments-2.13.0-pyhd8ed1ab_0.tar.bz2#9f478e8eedd301008b5f395bad0caaed
-https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.0-py39h2c22827_1.tar.bz2#a1ca42c2a746601d42f27bbcb7f6acfc
+https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.1-h8ffa02c_2.conda#c264aea0e16bba26afa0a0940e954492
+https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-ha8d29e2_1.conda#dbfc2a8d63a43a11acf4c704e1ef9d0c
+https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.1-pyhd8ed1ab_0.conda#f0be05afc9c9ab45e273c088e00c258b
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984
-https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.0.0-py39hb9d737c_1.tar.bz2#9f71f72dad4fd7b9da7bcc2ba64505bc
-https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_4.tar.bz2#dcc47a3b751508507183d17e569805e5
-https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py39hb9d737c_0.tar.bz2#a3c57360af28c0d9956622af99a521cd
-https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.3.0-hd8ed1ab_0.tar.bz2#f3e98e944832fb271a0dbda7b7771dc6
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py39hb9d737c_1.tar.bz2#ef84376736d1e8a814ccb06d1d814e6f
-https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.16.5-py39hf3d152e_0.tar.bz2#165e71a44187ac22e2e1669fd3ca2392
-https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py39hb9d737c_1004.tar.bz2#05a99367d885ec9990f25e74128a8a08
-https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_0.tar.bz2#4b108127973b66b36edd6449aa6afde0
-https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.5-py39hf939315_0.tar.bz2#c9ff0dfb602033b1f1aaf323b58e04fa
-https://conda.anaconda.org/conda-forge/linux-64/cryptography-37.0.4-py39hd97740a_0.tar.bz2#edc3668e7b71657237f94cf25e286478
-https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.9.1-pyhd8ed1ab_0.tar.bz2#68bb7f24f75b9691c42fd50e178749f5
-https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.37.3-py39hb9d737c_0.tar.bz2#21622fe576fcce5b861036e8d7282470
-https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.20.3-h57caac4_2.tar.bz2#58838c4ca7d1a5948f5cdcbb8170d753
-https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
-https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39hd257fcd_1007.tar.bz2#e7527bcf8da0dad996aaefd046c17480
-https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_hd09bd1e_0.tar.bz2#247c70ce54beeb3e60def44061576821
-https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.0-py39h4661b88_0.tar.bz2#ae807099430cd22b09b869b0536425b7
-https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.10-hc4f8a73_0.tar.bz2#fead2b3178129155c334c751df4daba6
-https://conda.anaconda.org/conda-forge/linux-64/pytest-7.1.3-py39hf3d152e_0.tar.bz2#b807481ba94ec32bc742f2fe775d0bff
-https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py39hd257fcd_2.tar.bz2#644be766007a1dc7590c3277647f81a1
-https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.3.0-py39hd257fcd_1.tar.bz2#c4b698994b2d8d2e659ae02202e6abe4
-https://conda.anaconda.org/conda-forge/linux-64/scipy-1.9.1-py39h8ba3f38_0.tar.bz2#beed054d4979cd70690aea2b257a6d55
-https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.0.5-pyhd8ed1ab_0.tar.bz2#743074b7a216807886f7e8f6d497cceb
-https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.4-py39h68ae834_0.tar.bz2#e871ee7de5bfa95095256e95e30be2a6
-https://conda.anaconda.org/conda-forge/linux-64/sip-6.6.2-py39h5a03fae_0.tar.bz2#e37704c6be07b8b14ffc1ce912802ce0
+https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py39h2ae25f5_3.tar.bz2#bcc7de3bb458a198b598ac1f75bf37e3
+https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122
+https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hc9151fd_0.conda#d26cc40830285883abaa766a7f7798bf
+https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.7-py39h227be39_0.conda#7d9a35091552af3655151f164ddd64a3
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026
+https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py39hb9d737c_1005.tar.bz2#a639fdd9428d8b25f8326a3838d54045
+https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py39h2ae25f5_2.tar.bz2#b3b4aab96d1c4ed394d6f4b9146699d4
+https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.1-py39h079d5ae_0.conda#3245013812dfbff6a22e57533ac6f69d
+https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.2.0-pyhd8ed1ab_0.conda#156fb994a4e07091c4fad2c148589eb2
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.0-h25f0c4b_0.conda#d764367398de61c0d5531dd912e6cc96
+https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38
+https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.10.2-pyhd8ed1ab_0.conda#ebf8b116aac3fe86270bfe5f61fe2b80
+https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d
+https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.3-py39h2ad29b5_0.conda#3ea96adbbc2a66fa45178102a9cfbecc
+https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.0.0-pyhd8ed1ab_0.conda#c34694044915d7f291ef257029f2e2af
+https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py39hf14cbfd_1.conda#67766c515601b3ee1514072d6fd060bb
+https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1
+https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.0-pyhd8ed1ab_0.conda#70ab87b96126f35d1e68de2ad9fb6423
+https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749
-https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_2.tar.bz2#5a3bb9dc2fe08a4a6f2b61548a1431d6
-https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py39hd257fcd_0.tar.bz2#e0f1f1d3013be31359d3ac635b288469
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h5a1934d_102.tar.bz2#bb8bdfa5e3e9e3f6ec861f05cd2ad441
+https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a
+https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_0.conda#81c20b15d2281a1ea48eac5b4eee8cfa
+https://conda.anaconda.org/conda-forge/noarch/identify-2.5.18-pyhd8ed1ab_0.conda#e07a5691c27e65d8d3d9278c578c7771
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.0-py39he190548_0.conda#62d6ddd9e534f4d325d12470cc4961ab
+https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_h1e13492_2.conda#d4ed7704f0fa589e4d7656780fa87557
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py39h94a714e_103.conda#ee29e7176b5854fa09ec17b101945401
+https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046
+https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934
+https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878
+https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.19.0-pyhd8ed1ab_0.conda#afaa9bf6992f67a82d75fad47a93ec84
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_104.conda#ed3526a8b7f37a7ee04ab0de2a0ac314
https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422
-https://conda.anaconda.org/conda-forge/noarch/identify-2.5.5-pyhd8ed1ab_0.tar.bz2#985ef0c4ed7a26731c419818080ef6ce
-https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.0-pyhd8ed1ab_0.tar.bz2#aee564f0021a2a0ab12239fbdd28e209
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.6.0-py39hf9fd14e_0.tar.bz2#bdc55b4069ab9d2f938525c4cf90def0
-https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.0-nompi_py39h6ced12a_102.tar.bz2#b92600d0fef7f12f426935d87d6413e6
-https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_1.tar.bz2#2e7e3630919d29c8216bfa2cd643d79e
-https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h5a03fae_0.tar.bz2#1fd9112714d50ee5be3dbf4fd23964dc
-https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.4.0-pyhd8ed1ab_0.tar.bz2#95286e05a617de9ebfe3246cecbfb72f
-https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.6-hc525480_0.tar.bz2#abd0f27f5e84cd0d5ae14d22b08795d7
-https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.0-py39hf5d525c_0.tar.bz2#b99ba7383d1c9dd18445dfff08439c48
-https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py39h8bb458d_101.tar.bz2#347f324dd99dfb0b1479a466213b55bf
-https://conda.anaconda.org/conda-forge/linux-64/graphviz-6.0.1-h5abf519_0.tar.bz2#123c55da3e9ea8664f73c70e13ef08c2
https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
-https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.20.0-py39hf3d152e_0.tar.bz2#314c8cb1538706f62ec36cf64370f2b2
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h18e9c17_0.tar.bz2#5ed8f83afff3b64fa91f7a6af8d7ff04
-https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e
-https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2#0738978569b10669bdef41c671252dd1
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.6.0-py39hf3d152e_0.tar.bz2#93f29e4d6f852de18384412b0e0d03b5
-https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2#089382ee0e2dc2eae33a04cc3c2bddb0
+https://conda.anaconda.org/conda-forge/linux-64/pre-commit-3.0.4-py39hf3d152e_0.conda#8a98273ee904735747a8f6706b187f3e
+https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69
+https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda#01f33ad2e0aaf6b5ba4add50dad5ad29
+https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py39h3088dd8_102.conda#a022e48c8b12bc56083bcce841978519
+https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48
+https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h5c7b992_3.conda#19e30314fe824605750da905febb8ee6
+https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda#11d178fc55199482ee48d6812ea83983
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.0-py39hf3d152e_0.conda#0967228e228ebeded6a36a6f4d5509ed
+https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429
https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345
-https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.8.1-pyhd8ed1ab_0.tar.bz2#7d8390ec71225ea9841b276552fdffba
+https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2
+https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py39h7360e5f_2.conda#fbee2ab3fe7729f2ff5c5699d58e40b9
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8
https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a
+https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h6e7ad6e_0.conda#7cb72bd5b1e7c5a23a062db90889356b
+https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a
diff --git a/requirements/ci/py310.yml b/requirements/ci/py310.yml
index 76ca9e4f58..d79015c055 100644
--- a/requirements/ci/py310.yml
+++ b/requirements/ci/py310.yml
@@ -15,8 +15,8 @@ dependencies:
- cf-units >=3.1
- cftime >=1.5
- dask-core >=2.26
- - matplotlib
- - netcdf4 !=1.6.1
+ - matplotlib >=3.5
+ - netcdf4 <1.6.1
- numpy >=1.19
- python-xxhash
- pyproj
@@ -48,4 +48,8 @@ dependencies:
- sphinx-copybutton
- sphinx-gallery >=0.11.0
- sphinx-panels
- - pydata-sphinx-theme = 0.8.1
+ - pydata-sphinx-theme
+
+# Temporary minimum pins.
+# See https://github.com/SciTools/iris/pull/5051
+ - graphviz >=6.0.0
diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml
index 5a8c878ee1..b68e8ccf45 100644
--- a/requirements/ci/py38.yml
+++ b/requirements/ci/py38.yml
@@ -15,8 +15,8 @@ dependencies:
- cf-units >=3.1
- cftime >=1.5
- dask-core >=2.26
- - matplotlib
- - netcdf4 !=1.6.1
+ - matplotlib >=3.5
+ - netcdf4 <1.6.1
- numpy >=1.19
- python-xxhash
- pyproj
@@ -48,4 +48,8 @@ dependencies:
- sphinx-copybutton
- sphinx-gallery >=0.11.0
- sphinx-panels
- - pydata-sphinx-theme = 0.8.1
+ - pydata-sphinx-theme
+
+# Temporary minimum pins.
+# See https://github.com/SciTools/iris/pull/5051
+ - graphviz >=6.0.0
diff --git a/requirements/ci/py39.yml b/requirements/ci/py39.yml
index 7931e20336..9fec76cfde 100644
--- a/requirements/ci/py39.yml
+++ b/requirements/ci/py39.yml
@@ -15,8 +15,8 @@ dependencies:
- cf-units >=3.1
- cftime >=1.5
- dask-core >=2.26
- - matplotlib
- - netcdf4 !=1.6.1
+ - matplotlib >=3.5
+ - netcdf4 <1.6.1
- numpy >=1.19
- python-xxhash
- pyproj
@@ -48,4 +48,8 @@ dependencies:
- sphinx-copybutton
- sphinx-gallery >=0.11.0
- sphinx-panels
- - pydata-sphinx-theme = 0.8.1
+ - pydata-sphinx-theme
+
+# Temporary minimum pins.
+# See https://github.com/SciTools/iris/pull/5051
+ - graphviz >=6.0.0
diff --git a/setup.cfg b/setup.cfg
index 92cbe4747c..75647e6623 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -51,8 +51,8 @@ install_requires =
cf-units>=3.1
cftime>=1.5.0
dask[array]>=2.26
- matplotlib
- netcdf4!=1.6.1
+ matplotlib>=3.5
+ netcdf4<1.6.1
numpy>=1.19
scipy
shapely!=1.8.3
@@ -69,7 +69,7 @@ where = lib
[options.extras_require]
docs =
- sphinx
+ sphinx<5
sphinx-copybutton
sphinx-gallery>=0.11.0
sphinx_rtd_theme